mirror of https://github.com/hwchase17/langchain.git (synced 2025-06-23 07:09:31 +00:00)
Merge branch 'master' into pprados/06-pdfplumber
commit 13dad044b4
3
Makefile
@ -83,3 +83,6 @@ lint lint_package lint_tests:
|
||||
format format_diff:
|
||||
uv run --group lint ruff format docs cookbook
|
||||
uv run --group lint ruff check --select I --fix docs cookbook
|
||||
|
||||
update-package-downloads:
|
||||
uv run python docs/scripts/packages_yml_get_downloads.py
|
||||
|
@ -21,7 +21,6 @@ Notebook | Description
|
||||
[code-analysis-deeplake.ipynb](https://github.com/langchain-ai/langchain/tree/master/cookbook/code-analysis-deeplake.ipynb) | Analyze its own code base with the help of GPT and Activeloop's Deep Lake.
|
||||
[custom_agent_with_plugin_retri...](https://github.com/langchain-ai/langchain/tree/master/cookbook/custom_agent_with_plugin_retrieval.ipynb) | Build a custom agent that can interact with AI plugins by retrieving tools and creating natural language wrappers around OpenAPI endpoints.
|
||||
[custom_agent_with_plugin_retri...](https://github.com/langchain-ai/langchain/tree/master/cookbook/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb) | Build a custom agent with plugin retrieval functionality, utilizing AI plugins from the `plugnplai` directory.
|
||||
[databricks_sql_db.ipynb](https://github.com/langchain-ai/langchain/tree/master/cookbook/databricks_sql_db.ipynb) | Connect to Databricks runtimes and Databricks SQL.
|
||||
[deeplake_semantic_search_over_...](https://github.com/langchain-ai/langchain/tree/master/cookbook/deeplake_semantic_search_over_chat.ipynb) | Perform semantic search and question-answering over a group chat using Activeloop's Deep Lake with GPT-4.
|
||||
[elasticsearch_db_qa.ipynb](https://github.com/langchain-ai/langchain/tree/master/cookbook/elasticsearch_db_qa.ipynb) | Interact with Elasticsearch analytics databases in natural language and build search queries via the Elasticsearch DSL API.
|
||||
[extraction_openai_tools.ipynb](https://github.com/langchain-ai/langchain/tree/master/cookbook/extraction_openai_tools.ipynb) | Perform structured data extraction with OpenAI tools.
|
||||
|
@ -1,273 +0,0 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "707d13a7",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Databricks\n",
|
||||
"\n",
|
||||
"This notebook covers how to connect to the [Databricks runtimes](https://docs.databricks.com/runtime/index.html) and [Databricks SQL](https://www.databricks.com/product/databricks-sql) using the SQLDatabase wrapper of LangChain.\n",
|
||||
"It is broken into 3 parts: installation and setup, connecting to Databricks, and examples."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "0076d072",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Installation and Setup"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "739b489b",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"!pip install databricks-sql-connector"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "73113163",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Connecting to Databricks\n",
|
||||
"\n",
|
||||
"You can connect to [Databricks runtimes](https://docs.databricks.com/runtime/index.html) and [Databricks SQL](https://www.databricks.com/product/databricks-sql) using the `SQLDatabase.from_databricks()` method.\n",
|
||||
"\n",
|
||||
"### Syntax\n",
|
||||
"```python\n",
|
||||
"SQLDatabase.from_databricks(\n",
|
||||
" catalog: str,\n",
|
||||
" schema: str,\n",
|
||||
" host: Optional[str] = None,\n",
|
||||
" api_token: Optional[str] = None,\n",
|
||||
" warehouse_id: Optional[str] = None,\n",
|
||||
" cluster_id: Optional[str] = None,\n",
|
||||
" engine_args: Optional[dict] = None,\n",
|
||||
" **kwargs: Any)\n",
|
||||
"```\n",
|
||||
"### Required Parameters\n",
|
||||
"* `catalog`: The catalog name in the Databricks database.\n",
|
||||
"* `schema`: The schema name in the catalog.\n",
|
||||
"\n",
|
||||
"### Optional Parameters\n",
|
||||
"There following parameters are optional. When executing the method in a Databricks notebook, you don't need to provide them in most of the cases.\n",
|
||||
"* `host`: The Databricks workspace hostname, excluding 'https://' part. Defaults to 'DATABRICKS_HOST' environment variable or current workspace if in a Databricks notebook.\n",
|
||||
"* `api_token`: The Databricks personal access token for accessing the Databricks SQL warehouse or the cluster. Defaults to 'DATABRICKS_TOKEN' environment variable or a temporary one is generated if in a Databricks notebook.\n",
|
||||
"* `warehouse_id`: The warehouse ID in the Databricks SQL.\n",
|
||||
"* `cluster_id`: The cluster ID in the Databricks Runtime. If running in a Databricks notebook and both 'warehouse_id' and 'cluster_id' are None, it uses the ID of the cluster the notebook is attached to.\n",
|
||||
"* `engine_args`: The arguments to be used when connecting Databricks.\n",
|
||||
"* `**kwargs`: Additional keyword arguments for the `SQLDatabase.from_uri` method."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "b11c7e48",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Examples"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "8102bca0",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Connecting to Databricks with SQLDatabase wrapper\n",
|
||||
"from langchain_community.utilities import SQLDatabase\n",
|
||||
"\n",
|
||||
"db = SQLDatabase.from_databricks(catalog=\"samples\", schema=\"nyctaxi\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "9dd36f58",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Creating a OpenAI Chat LLM wrapper\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"\n",
|
||||
"llm = ChatOpenAI(temperature=0, model_name=\"gpt-4\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "5b5c5f1a",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### SQL Chain example\n",
|
||||
"\n",
|
||||
"This example demonstrates the use of the [SQL Chain](https://python.langchain.com/en/latest/modules/chains/examples/sqlite.html) for answering a question over a Databricks database."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "36f2270b",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_community.utilities import SQLDatabaseChain\n",
|
||||
"\n",
|
||||
"db_chain = SQLDatabaseChain.from_llm(llm, db, verbose=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "4e2b5f25",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\n",
|
||||
"\u001b[1m> Entering new SQLDatabaseChain chain...\u001b[0m\n",
|
||||
"What is the average duration of taxi rides that start between midnight and 6am?\n",
|
||||
"SQLQuery:\u001b[32;1m\u001b[1;3mSELECT AVG(UNIX_TIMESTAMP(tpep_dropoff_datetime) - UNIX_TIMESTAMP(tpep_pickup_datetime)) as avg_duration\n",
|
||||
"FROM trips\n",
|
||||
"WHERE HOUR(tpep_pickup_datetime) >= 0 AND HOUR(tpep_pickup_datetime) < 6\u001b[0m\n",
|
||||
"SQLResult: \u001b[33;1m\u001b[1;3m[(987.8122786304605,)]\u001b[0m\n",
|
||||
"Answer:\u001b[32;1m\u001b[1;3mThe average duration of taxi rides that start between midnight and 6am is 987.81 seconds.\u001b[0m\n",
|
||||
"\u001b[1m> Finished chain.\u001b[0m\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'The average duration of taxi rides that start between midnight and 6am is 987.81 seconds.'"
|
||||
]
|
||||
},
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"db_chain.run(\n",
|
||||
" \"What is the average duration of taxi rides that start between midnight and 6am?\"\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "e496d5e5",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### SQL Database Agent example\n",
|
||||
"\n",
|
||||
"This example demonstrates the use of the [SQL Database Agent](/docs/integrations/tools/sql_database) for answering questions over a Databricks database."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"id": "9918e86a",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.agents import create_sql_agent\n",
|
||||
"from langchain_community.agent_toolkits import SQLDatabaseToolkit\n",
|
||||
"\n",
|
||||
"toolkit = SQLDatabaseToolkit(db=db, llm=llm)\n",
|
||||
"agent = create_sql_agent(llm=llm, toolkit=toolkit, verbose=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"id": "c484a76e",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\n",
|
||||
"\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n",
|
||||
"\u001b[32;1m\u001b[1;3mAction: list_tables_sql_db\n",
|
||||
"Action Input: \u001b[0m\n",
|
||||
"Observation: \u001b[38;5;200m\u001b[1;3mtrips\u001b[0m\n",
|
||||
"Thought:\u001b[32;1m\u001b[1;3mI should check the schema of the trips table to see if it has the necessary columns for trip distance and duration.\n",
|
||||
"Action: schema_sql_db\n",
|
||||
"Action Input: trips\u001b[0m\n",
|
||||
"Observation: \u001b[33;1m\u001b[1;3m\n",
|
||||
"CREATE TABLE trips (\n",
|
||||
"\ttpep_pickup_datetime TIMESTAMP, \n",
|
||||
"\ttpep_dropoff_datetime TIMESTAMP, \n",
|
||||
"\ttrip_distance FLOAT, \n",
|
||||
"\tfare_amount FLOAT, \n",
|
||||
"\tpickup_zip INT, \n",
|
||||
"\tdropoff_zip INT\n",
|
||||
") USING DELTA\n",
|
||||
"\n",
|
||||
"/*\n",
|
||||
"3 rows from trips table:\n",
|
||||
"tpep_pickup_datetime\ttpep_dropoff_datetime\ttrip_distance\tfare_amount\tpickup_zip\tdropoff_zip\n",
|
||||
"2016-02-14 16:52:13+00:00\t2016-02-14 17:16:04+00:00\t4.94\t19.0\t10282\t10171\n",
|
||||
"2016-02-04 18:44:19+00:00\t2016-02-04 18:46:00+00:00\t0.28\t3.5\t10110\t10110\n",
|
||||
"2016-02-17 17:13:57+00:00\t2016-02-17 17:17:55+00:00\t0.7\t5.0\t10103\t10023\n",
|
||||
"*/\u001b[0m\n",
|
||||
"Thought:\u001b[32;1m\u001b[1;3mThe trips table has the necessary columns for trip distance and duration. I will write a query to find the longest trip distance and its duration.\n",
|
||||
"Action: query_checker_sql_db\n",
|
||||
"Action Input: SELECT trip_distance, tpep_dropoff_datetime - tpep_pickup_datetime as duration FROM trips ORDER BY trip_distance DESC LIMIT 1\u001b[0m\n",
|
||||
"Observation: \u001b[31;1m\u001b[1;3mSELECT trip_distance, tpep_dropoff_datetime - tpep_pickup_datetime as duration FROM trips ORDER BY trip_distance DESC LIMIT 1\u001b[0m\n",
|
||||
"Thought:\u001b[32;1m\u001b[1;3mThe query is correct. I will now execute it to find the longest trip distance and its duration.\n",
|
||||
"Action: query_sql_db\n",
|
||||
"Action Input: SELECT trip_distance, tpep_dropoff_datetime - tpep_pickup_datetime as duration FROM trips ORDER BY trip_distance DESC LIMIT 1\u001b[0m\n",
|
||||
"Observation: \u001b[36;1m\u001b[1;3m[(30.6, '0 00:43:31.000000000')]\u001b[0m\n",
|
||||
"Thought:\u001b[32;1m\u001b[1;3mI now know the final answer.\n",
|
||||
"Final Answer: The longest trip distance is 30.6 miles and it took 43 minutes and 31 seconds.\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[1m> Finished chain.\u001b[0m\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'The longest trip distance is 30.6 miles and it took 43 minutes and 31 seconds.'"
|
||||
]
|
||||
},
|
||||
"execution_count": 9,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"agent.run(\"What is the longest trip distance and how long did it take?\")"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.3"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
206
docs/docs/integrations/chat/abso.ipynb
Normal file
@ -0,0 +1,206 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "raw",
|
||||
"id": "afaf8039",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"---\n",
|
||||
"sidebar_label: Abso\n",
|
||||
"---"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "e49f1e0d",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# ChatAbso\n",
|
||||
"\n",
|
||||
"This will help you getting started with ChatAbso [chat models](https://python.langchain.com/docs/concepts/chat_models/). For detailed documentation of all ChatAbso features and configurations head to the [API reference](https://python.langchain.com/api_reference/en/latest/chat_models/langchain_abso.chat_models.ChatAbso.html).\n",
|
||||
"\n",
|
||||
"- You can find the full documentation for the Abso router [here] (https://abso.ai)\n",
|
||||
"\n",
|
||||
"## Overview\n",
|
||||
"### Integration details\n",
|
||||
"\n",
|
||||
"| Class | Package | Local | Serializable | [JS support](https://js.langchain.com/docs/integrations/chat/abso) | Package downloads | Package latest |\n",
|
||||
"| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n",
|
||||
"| [ChatAbso](https://python.langchain.com/api_reference/en/latest/chat_models/langchain_abso.chat_models.ChatAbso.html) | [langchain-abso](https://python.langchain.com/api_reference/en/latest/abso_api_reference.html) | ❌ | ❌ | ❌ |  |  |\n",
|
||||
"\n",
|
||||
"## Setup\n",
|
||||
"To access ChatAbso models you'll need to create an OpenAI account, get an API key, and install the `langchain-abso` integration package.\n",
|
||||
"\n",
|
||||
"### Credentials\n",
|
||||
"\n",
|
||||
"- TODO: Update with relevant info.\n",
|
||||
"\n",
|
||||
"Head to (TODO: link) to sign up to ChatAbso and generate an API key. Once you've done this set the ABSO_API_KEY environment variable:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "433e8d2b-9519-4b49-b2c4-7ab65b046c94",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import getpass\n",
|
||||
"import os\n",
|
||||
"\n",
|
||||
"if not os.getenv(\"OPENAI_API_KEY\"):\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"Enter your OpenAI API key: \")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "0730d6a1-c893-4840-9817-5e5251676d5d",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Installation\n",
|
||||
"\n",
|
||||
"The LangChain ChatAbso integration lives in the `langchain-abso` package:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "652d6238-1f87-422a-b135-f5abbb8652fc",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%pip install -qU langchain-abso"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "a38cde65-254d-4219-a441-068766c0d4b5",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Instantiation\n",
|
||||
"\n",
|
||||
"Now we can instantiate our model object and generate chat completions:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_abso import ChatAbso\n",
|
||||
"\n",
|
||||
"llm = ChatAbso(fast_model=\"gpt-4o\", slow_model=\"o3-mini\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "2b4f3e15",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Invocation\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "62e0dbc3",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"messages = [\n",
|
||||
" (\n",
|
||||
" \"system\",\n",
|
||||
" \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n",
|
||||
" ),\n",
|
||||
" (\"human\", \"I love programming.\"),\n",
|
||||
"]\n",
|
||||
"ai_msg = llm.invoke(messages)\n",
|
||||
"ai_msg"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "d86145b3-bfef-46e8-b227-4dda5c9c2705",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(ai_msg.content)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "18e2bfc0-7e78-4528-a73f-499ac150dca8",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Chaining\n",
|
||||
"\n",
|
||||
"We can [chain](/docs/how_to/sequence/) our model with a prompt template like so:\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"\n",
|
||||
"prompt = ChatPromptTemplate(\n",
|
||||
" [\n",
|
||||
" (\n",
|
||||
" \"system\",\n",
|
||||
" \"You are a helpful assistant that translates {input_language} to {output_language}.\",\n",
|
||||
" ),\n",
|
||||
" (\"human\", \"{input}\"),\n",
|
||||
" ]\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"chain = prompt | llm\n",
|
||||
"chain.invoke(\n",
|
||||
" {\n",
|
||||
" \"input_language\": \"English\",\n",
|
||||
" \"output_language\": \"German\",\n",
|
||||
" \"input\": \"I love programming.\",\n",
|
||||
" }\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## API reference\n",
|
||||
"\n",
|
||||
"For detailed documentation of all ChatAbso features and configurations head to the API reference: https://python.langchain.com/api_reference/en/latest/chat_models/langchain_abso.chat_models.ChatAbso.html"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
@ -210,7 +210,7 @@
|
||||
"id": "96ed13d4",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Instead of `model_id`, you can also pass the `deployment_id` of the previously tuned model. The entire model tuning workflow is described in [Working with TuneExperiment and PromptTuner](https://ibm.github.io/watsonx-ai-python-sdk/pt_working_with_class_and_prompt_tuner.html)."
|
||||
"Instead of `model_id`, you can also pass the `deployment_id` of the previously [deployed model with reference to a Prompt Template](https://cloud.ibm.com/apidocs/watsonx-ai#deployments-text-chat)."
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -228,6 +228,31 @@
|
||||
")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "3d29767c",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For certain requirements, there is an option to pass the IBM's [`APIClient`](https://ibm.github.io/watsonx-ai-python-sdk/base.html#apiclient) object into the `ChatWatsonx` class."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "0ae9531e",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from ibm_watsonx_ai import APIClient\n",
|
||||
"\n",
|
||||
"api_client = APIClient(...)\n",
|
||||
"\n",
|
||||
"chat = ChatWatsonx(\n",
|
||||
" model_id=\"ibm/granite-34b-code-instruct\",\n",
|
||||
" watsonx_client=api_client,\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "f571001d",
|
||||
@ -448,9 +473,7 @@
|
||||
"source": [
|
||||
"## Tool calling\n",
|
||||
"\n",
|
||||
"### ChatWatsonx.bind_tools()\n",
|
||||
"\n",
|
||||
"Please note that `ChatWatsonx.bind_tools` is on beta state, so we recommend using `mistralai/mistral-large` model."
|
||||
"### ChatWatsonx.bind_tools()"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -563,7 +586,7 @@
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"display_name": "langchain_ibm",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
|
@ -19,7 +19,7 @@
|
||||
"source": [
|
||||
"# ChatSambaNovaCloud\n",
|
||||
"\n",
|
||||
"This will help you getting started with SambaNovaCloud [chat models](/docs/concepts/chat_models/). For detailed documentation of all ChatSambaNovaCloud features and configurations head to the [API reference](https://python.langchain.com/api_reference/sambanova/chat_models/langchain_sambanova.ChatSambaNovaCloud.html).\n",
|
||||
"This will help you getting started with SambaNovaCloud [chat models](/docs/concepts/chat_models/). For detailed documentation of all ChatSambaNovaCloud features and configurations head to the [API reference](https://docs.sambanova.ai/cloud/docs/get-started/overview).\n",
|
||||
"\n",
|
||||
"**[SambaNova](https://sambanova.ai/)'s** [SambaNova Cloud](https://cloud.sambanova.ai/) is a platform for performing inference with open-source models\n",
|
||||
"\n",
|
||||
@ -28,7 +28,7 @@
|
||||
"\n",
|
||||
"| Class | Package | Local | Serializable | JS support | Package downloads | Package latest |\n",
|
||||
"| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n",
|
||||
"| [ChatSambaNovaCloud](https://python.langchain.com/api_reference/sambanova/chat_models/langchain_sambanova.ChatSambaNovaCloud.html) | [langchain-community](https://python.langchain.com/api_reference/community/index.html) | ❌ | ❌ | ❌ |  |  |\n",
|
||||
"| [ChatSambaNovaCloud](https://docs.sambanova.ai/cloud/docs/get-started/overview) | [langchain-sambanova](https://python.langchain.com/docs/integrations/providers/sambanova/) | ❌ | ❌ | ❌ |  |  |\n",
|
||||
"\n",
|
||||
"### Model features\n",
|
||||
"\n",
|
||||
@ -545,7 +545,7 @@
|
||||
"source": [
|
||||
"## API reference\n",
|
||||
"\n",
|
||||
"For detailed documentation of all ChatSambaNovaCloud features and configurations head to the API reference: https://python.langchain.com/api_reference/sambanova/chat_models/langchain_sambanova.ChatSambaNovaCloud.html"
|
||||
"For detailed documentation of all SambaNovaCloud features and configurations head to the API reference: https://docs.sambanova.ai/cloud/docs/get-started/overview"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
@ -19,7 +19,7 @@
|
||||
"source": [
|
||||
"# ChatSambaStudio\n",
|
||||
"\n",
|
||||
"This will help you getting started with SambaStudio [chat models](/docs/concepts/chat_models). For detailed documentation of all ChatStudio features and configurations head to the [API reference](https://python.langchain.com/api_reference/sambanova/chat_models/langchain_sambanova.chat_models.sambanova.ChatSambaStudio.html).\n",
|
||||
"This will help you getting started with SambaStudio [chat models](/docs/concepts/chat_models). For detailed documentation of all ChatStudio features and configurations head to the [API reference](https://docs.sambanova.ai/sambastudio/latest/index.html).\n",
|
||||
"\n",
|
||||
"**[SambaNova](https://sambanova.ai/)'s** [SambaStudio](https://docs.sambanova.ai/sambastudio/latest/sambastudio-intro.html) SambaStudio is a rich, GUI-based platform that provides the functionality to train, deploy, and manage models in SambaNova [DataScale](https://sambanova.ai/products/datascale) systems.\n",
|
||||
"\n",
|
||||
@ -28,7 +28,7 @@
|
||||
"\n",
|
||||
"| Class | Package | Local | Serializable | JS support | Package downloads | Package latest |\n",
|
||||
"| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n",
|
||||
"| [ChatSambaStudio](https://python.langchain.com/api_reference/sambanova/chat_models/langchain_sambanova.chat_models.sambanova.ChatSambaStudio.html) | [langchain-community](https://python.langchain.com/api_reference/community/index.html) | ❌ | ❌ | ❌ |  |  |\n",
|
||||
"| [ChatSambaStudio](https://docs.sambanova.ai/sambastudio/latest/index.html) | [langchain-sambanova](https://python.langchain.com/docs/integrations/providers/sambanova/) | ❌ | ❌ | ❌ |  |  |\n",
|
||||
"\n",
|
||||
"### Model features\n",
|
||||
"\n",
|
||||
@ -483,7 +483,7 @@
|
||||
"source": [
|
||||
"## API reference\n",
|
||||
"\n",
|
||||
"For detailed documentation of all ChatSambaStudio features and configurations head to the API reference: https://python.langchain.com/api_reference/sambanova/chat_models/langchain_sambanova.sambanova.chat_models.ChatSambaStudio.html"
|
||||
"For detailed documentation of all SambaStudio features and configurations head to the API reference: https://docs.sambanova.ai/sambastudio/latest/api-ref-landing.html"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
@ -2,7 +2,9 @@
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"id": "xwiDq5fOuoRn"
|
||||
},
|
||||
"source": [
|
||||
"# Apify Dataset\n",
|
||||
"\n",
|
||||
@ -20,33 +22,63 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "qRW2-mokuoRp",
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%pip install --upgrade --quiet apify-client"
|
||||
"%pip install --upgrade --quiet langchain langchain-apify langchain-openai"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"id": "8jRVq16LuoRq"
|
||||
},
|
||||
"source": [
|
||||
"First, import `ApifyDatasetLoader` into your source code:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"execution_count": 2,
|
||||
"metadata": {
|
||||
"id": "umXQHqIJuoRq"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_community.document_loaders import ApifyDatasetLoader\n",
|
||||
"from langchain_apify import ApifyDatasetLoader\n",
|
||||
"from langchain_core.documents import Document"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"id": "NjGwKy59vz1X"
|
||||
},
|
||||
"source": [
|
||||
"Find your [Apify API token](https://console.apify.com/account/integrations) and [OpenAI API key](https://platform.openai.com/account/api-keys) and initialize these into environment variable:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"metadata": {
|
||||
"id": "AvzNtyCxwDdr"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os\n",
|
||||
"\n",
|
||||
"os.environ[\"APIFY_API_TOKEN\"] = \"your-apify-api-token\"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = \"your-openai-api-key\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "d1O-KL48uoRr"
|
||||
},
|
||||
"source": [
|
||||
"Then provide a function that maps Apify dataset record fields to LangChain `Document` format.\n",
|
||||
"\n",
|
||||
@ -64,8 +96,10 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"execution_count": 8,
|
||||
"metadata": {
|
||||
"id": "m1SpA7XZuoRr"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"loader = ApifyDatasetLoader(\n",
|
||||
@ -78,8 +112,10 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"execution_count": 9,
|
||||
"metadata": {
|
||||
"id": "0hWX7ABsuoRs"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"data = loader.load()"
|
||||
@ -87,7 +123,9 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"id": "EJCVFVKNuoRs"
|
||||
},
|
||||
"source": [
|
||||
"## An example with question answering\n",
|
||||
"\n",
|
||||
@ -96,21 +134,26 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"execution_count": 14,
|
||||
"metadata": {
|
||||
"id": "sNisJKzZuoRt"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.indexes import VectorstoreIndexCreator\n",
|
||||
"from langchain_community.utilities import ApifyWrapper\n",
|
||||
"from langchain_apify import ApifyWrapper\n",
|
||||
"from langchain_core.documents import Document\n",
|
||||
"from langchain_openai import OpenAI\n",
|
||||
"from langchain_core.vectorstores import InMemoryVectorStore\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from langchain_openai.embeddings import OpenAIEmbeddings"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"execution_count": 15,
|
||||
"metadata": {
|
||||
"id": "qcfmnbdDuoRu"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"loader = ApifyDatasetLoader(\n",
|
||||
@ -123,27 +166,47 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"execution_count": 16,
|
||||
"metadata": {
|
||||
"id": "8b0xzKJxuoRv"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"index = VectorstoreIndexCreator(embedding=OpenAIEmbeddings()).from_loaders([loader])"
|
||||
"index = VectorstoreIndexCreator(\n",
|
||||
" vectorstore_cls=InMemoryVectorStore, embedding=OpenAIEmbeddings()\n",
|
||||
").from_loaders([loader])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"execution_count": 17,
|
||||
"metadata": {
|
||||
"id": "7zPXGsVFwUGA"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"llm = ChatOpenAI(model=\"gpt-4o-mini\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 23,
|
||||
"metadata": {
|
||||
"id": "ecWrdM4guoRv"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"query = \"What is Apify?\"\n",
|
||||
"result = index.query_with_sources(query, llm=OpenAI())"
|
||||
"result = index.query_with_sources(query, llm=llm)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "QH8r44e9uoRv",
|
||||
"outputId": "361fe050-f75d-4d5a-c327-5e7bd190fba5"
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@ -162,6 +225,9 @@
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"provenance": []
|
||||
},
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
@ -181,5 +247,5 @@
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 4
|
||||
}
|
||||
"nbformat_minor": 0
|
||||
}
|
@ -195,7 +195,7 @@
|
||||
"id": "96ed13d4",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Instead of `model_id`, you can also pass the `deployment_id` of the previously tuned model. The entire model tuning workflow is described [here](https://ibm.github.io/watsonx-ai-python-sdk/pt_working_with_class_and_prompt_tuner.html)."
|
||||
"Instead of `model_id`, you can also pass the `deployment_id` of the previously tuned model. The entire model tuning workflow is described in [Working with TuneExperiment and PromptTuner](https://ibm.github.io/watsonx-ai-python-sdk/pt_tune_experiment_run.html)."
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -420,7 +420,7 @@
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"display_name": "langchain_ibm",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
|
14
docs/docs/integrations/providers/abso.md
Normal file
@ -0,0 +1,14 @@
|
||||
# Abso
|
||||
|
||||
[Abso](https://abso.ai/#router) is an open-source LLM proxy that automatically routes requests between fast and slow models based on prompt complexity. It uses various heuristics to choose the proper model, and it is fast with low latency.
|
||||
|
||||
|
||||
## Installation and setup
|
||||
|
||||
```bash
|
||||
pip install langchain-abso
|
||||
```
|
||||
|
||||
## Chat Model
|
||||
|
||||
See usage details [here](/docs/integrations/chat/abso)
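
A minimal usage sketch, based on the ChatAbso notebook linked above (the `fast_model` and `slow_model` values are taken from that example):

```python
from langchain_abso import ChatAbso

# Abso routes each request to the fast or slow model based on prompt complexity
llm = ChatAbso(fast_model="gpt-4o", slow_model="o3-mini")
print(llm.invoke("Tell me a joke about artificial intelligence.").content)
```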
|
@ -14,20 +14,34 @@ blogs, or knowledge bases.
|
||||
|
||||
## Installation and Setup
|
||||
|
||||
- Install the Apify API client for Python with `pip install apify-client`
|
||||
- Install the LangChain Apify package for Python with:
|
||||
```bash
|
||||
pip install langchain-apify
|
||||
```
|
||||
- Get your [Apify API token](https://console.apify.com/account/integrations) and either set it as
|
||||
an environment variable (`APIFY_API_TOKEN`) or pass it to the `ApifyWrapper` as `apify_api_token` in the constructor.
|
||||
an environment variable (`APIFY_API_TOKEN`) or pass it as `apify_api_token` in the constructor.
|
||||
|
||||
## Tool
|
||||
|
||||
## Utility
|
||||
You can use the `ApifyActorsTool` to call Apify Actors from agents.
|
||||
|
||||
```python
|
||||
from langchain_apify import ApifyActorsTool
|
||||
```
|
||||
|
||||
See [this notebook](/docs/integrations/tools/apify_actors) for example usage.
|
||||
|
||||
For more information on how to use this tool, visit [the Apify integration documentation](https://docs.apify.com/platform/integrations/langgraph).
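
A short sketch of running an Actor with this tool, mirroring the [Apify Actors notebook](/docs/integrations/tools/apify_actors) (the Actor name and `run_input` fields are taken from that example):

```python
from langchain_apify import ApifyActorsTool

# Wrap the RAG Web Browser Actor as a LangChain tool
tool = ApifyActorsTool("apify/rag-web-browser")

# run_input must follow the Actor's input schema
results = tool.invoke({"run_input": {"query": "what is apify?", "maxResults": 2}})
```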
|
||||
|
||||
## Wrapper
|
||||
|
||||
You can use the `ApifyWrapper` to run Actors on the Apify platform.
|
||||
|
||||
```python
|
||||
from langchain_community.utilities import ApifyWrapper
|
||||
from langchain_apify import ApifyWrapper
|
||||
```
|
||||
|
||||
For more information on this wrapper, see [the API reference](https://python.langchain.com/api_reference/community/utilities/langchain_community.utilities.apify.ApifyWrapper.html).
|
||||
For more information on how to use this wrapper, see [the Apify integration documentation](https://docs.apify.com/platform/integrations/langchain).
|
||||
|
||||
|
||||
## Document loader
|
||||
@ -35,7 +49,10 @@ For more information on this wrapper, see [the API reference](https://python.lan
|
||||
You can also use our `ApifyDatasetLoader` to get data from an Apify dataset.
|
||||
|
||||
```python
|
||||
from langchain_community.document_loaders import ApifyDatasetLoader
|
||||
from langchain_apify import ApifyDatasetLoader
|
||||
```
|
||||
|
||||
For a more detailed walkthrough of this loader, see [this notebook](/docs/integrations/document_loaders/apify_dataset).
|
||||
|
||||
|
||||
Source code for this integration can be found in the [LangChain Apify repository](https://github.com/apify/langchain-apify).
|
||||
|
@ -103,14 +103,7 @@ See [MLflow LangChain Integration](/docs/integrations/providers/mlflow_tracking)
|
||||
|
||||
SQLDatabase
|
||||
-----------
|
||||
You can connect to Databricks SQL using the SQLDatabase wrapper of LangChain.
|
||||
```
|
||||
from langchain.sql_database import SQLDatabase
|
||||
|
||||
db = SQLDatabase.from_databricks(catalog="samples", schema="nyctaxi")
|
||||
```
|
||||
|
||||
See [Databricks SQL Agent](https://docs.databricks.com/en/large-language-models/langchain.html#databricks-sql-agent) for how to connect Databricks SQL with your LangChain Agent as a powerful querying tool.
|
||||
To connect to Databricks SQL or query structured data, see the [Databricks structured retriever tool documentation](https://docs.databricks.com/en/generative-ai/agent-framework/structured-retrieval-tools.html#table-query-tool); to create an agent using the SQL UDF created above, see [Databricks UC Integration](https://docs.unitycatalog.io/ai/integrations/langchain/).
|
||||
|
||||
Open Models
|
||||
-----------
|
||||
|
22
docs/docs/integrations/providers/graph_rag.mdx
Normal file
@ -0,0 +1,22 @@
|
||||
# Graph RAG
|
||||
|
||||
## Overview
|
||||
|
||||
[Graph RAG](https://datastax.github.io/graph-rag/) provides a retriever interface
|
||||
that combines **unstructured** similarity search on vectors with **structured**
|
||||
traversal of metadata properties. This enables graph-based retrieval over **existing**
|
||||
vector stores.
|
||||
|
||||
## Installation and setup
|
||||
|
||||
```bash
|
||||
pip install langchain-graph-retriever
|
||||
```
|
||||
|
||||
## Retrievers
|
||||
|
||||
```python
|
||||
from langchain_graph_retriever import GraphRetriever
|
||||
```
|
||||
|
||||
For more information, see the [Graph RAG Integration Guide](/docs/integrations/retrievers/graph_rag).
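
A minimal traversal sketch, adapted from the integration guide linked above; it assumes a `vector_store` already populated with documents carrying `habitat` and `origin` metadata:

```python
from graph_retriever.strategies import Eager
from langchain_graph_retriever import GraphRetriever

# Start from the closest document, then traverse to others sharing habitat/origin metadata
retriever = GraphRetriever(
    store=vector_store,
    edges=[("habitat", "habitat"), ("origin", "origin")],
    strategy=Eager(k=5, start_k=1, max_depth=2),
)
docs = retriever.invoke("what animals could be found near a capybara?")
```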
|
@ -81,6 +81,13 @@
|
||||
"llm.invoke(\"Tell me a joke about artificial intelligence.\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For a more detailed walkthrough of the ChatSambaNovaCloud component, see [this notebook](https://python.langchain.com/docs/integrations/chat/sambanova/)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
@ -93,6 +100,13 @@
|
||||
"llm.invoke(\"Tell me a joke about artificial intelligence.\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For a more detailed walkthrough of the ChatSambaStudio component, see [this notebook](https://python.langchain.com/docs/integrations/chat/sambastudio/)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
@ -116,7 +130,14 @@
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"API Reference [langchain-sambanova](https://python.langchain.com/api_reference/sambanova/index.html)"
|
||||
"For a more detailed walkthrough of the SambaStudioEmbeddings component, see [this notebook](https://python.langchain.com/docs/integrations/text_embedding/sambanova/)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"API Reference [langchain-sambanova](https://docs.sambanova.ai/cloud/api-reference)"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
379
docs/docs/integrations/retrievers/graph_rag.mdx
Normal file
@ -0,0 +1,379 @@
|
||||
---
|
||||
sidebar_label: Graph RAG
|
||||
description: Graph traversal over any Vector Store using document metadata.
|
||||
---
|
||||
|
||||
import ChatModelTabs from "@theme/ChatModelTabs";
|
||||
import EmbeddingTabs from "@theme/EmbeddingTabs";
|
||||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
|
||||
# Graph RAG
|
||||
|
||||
This guide provides an introduction to Graph RAG. For detailed documentation of all
|
||||
supported features and configurations, refer to the
|
||||
[Graph RAG Project Page](https://datastax.github.io/graph-rag/).
|
||||
|
||||
## Overview
|
||||
|
||||
The `GraphRetriever` from the `langchain-graph-retriever` package provides a LangChain
|
||||
[retriever](/docs/concepts/retrievers/) that combines **unstructured** similarity search
|
||||
on vectors with **structured** traversal of metadata properties. This enables graph-based
|
||||
retrieval over an **existing** vector store.
|
||||
|
||||
### Integration details
|
||||
|
||||
| Retriever | Source | PyPI Package | Latest | Project Page |
|
||||
| :--- | :--- | :---: | :---: | :---: |
|
||||
| GraphRetriever | [github.com/datastax/graph-rag](https://github.com/datastax/graph-rag/tree/main/packages/langchain-graph-retriever) | [langchain-graph-retriever](https://pypi.org/project/langchain-graph-retriever/) |  | [Graph RAG](https://datastax.github.io/graph-rag/) |
|
||||
|
||||
|
||||
## Benefits
|
||||
|
||||
* [**Link based on existing metadata:**](https://datastax.github.io/graph-rag/get-started/)
|
||||
Use existing metadata fields without additional processing. Retrieve more from an
|
||||
existing vector store!
|
||||
|
||||
* [**Change links on demand:**](https://datastax.github.io/graph-rag/get-started/edges/)
|
||||
Edges can be specified on-the-fly, allowing different relationships to be traversed
|
||||
based on the question.
|
||||
|
||||
|
||||
* [**Pluggable Traversal Strategies:**](https://datastax.github.io/graph-rag/get-started/strategies/)
|
||||
Use built-in traversal strategies like Eager or MMR, or define custom logic to select
|
||||
which nodes to explore.
|
||||
|
||||
* [**Broad compatibility:**](https://datastax.github.io/graph-rag/get-started/adapters/)
|
||||
Adapters are available for a variety of vector stores with support for additional
|
||||
stores easily added.
|
||||
|
||||
## Setup
|
||||
|
||||
### Installation
|
||||
|
||||
This retriever lives in the `langchain-graph-retriever` package.
|
||||
|
||||
```bash
|
||||
pip install -qU langchain-graph-retriever
|
||||
```
|
||||
## Instantiation
|
||||
|
||||
The following examples will show how to perform graph traversal over some sample
|
||||
Documents about animals.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
<details>
|
||||
<summary>Toggle for Details</summary>
|
||||
<div>
|
||||
1. Ensure you have Python 3.10+ installed
|
||||
|
||||
1. Install the following package that provides sample data.
|
||||
```bash
|
||||
pip install -qU graph_rag_example_helpers
|
||||
```
|
||||
|
||||
1. Download the test documents:
|
||||
```python
|
||||
from graph_rag_example_helpers.datasets.animals import fetch_documents
|
||||
animals = fetch_documents()
|
||||
```
|
||||
|
||||
1. <EmbeddingTabs/>
|
||||
</div>
|
||||
</details>
|
||||
|
||||
### Populating the Vector store
|
||||
|
||||
This section shows how to populate a variety of vector stores with the sample data.
|
||||
|
||||
For help on choosing one of the vector stores below, or to add support for your
|
||||
vector store, consult the documentation about
|
||||
[Adapters and Supported Stores](https://datastax.github.io/graph-rag/guide/adapters/).
|
||||
|
||||
<Tabs groupId="vector-store" queryString>
|
||||
<TabItem value="astra-db" label="AstraDB" default>
|
||||
<div style={{ paddingLeft: '30px' }}>
|
||||
Install the `langchain-graph-retriever` package with the `astra` extra:
|
||||
|
||||
```bash
|
||||
pip install "langchain-graph-retriever[astra]"
|
||||
```
|
||||
|
||||
Then create a vector store and load the test documents:
|
||||
|
||||
```python
|
||||
from langchain_astradb import AstraDBVectorStore
|
||||
|
||||
vector_store = AstraDBVectorStore.from_documents(
|
||||
documents=animals,
|
||||
embedding=embeddings,
|
||||
collection_name="animals",
|
||||
api_endpoint=ASTRA_DB_API_ENDPOINT,
|
||||
token=ASTRA_DB_APPLICATION_TOKEN,
|
||||
)
|
||||
```
|
||||
For the `ASTRA_DB_API_ENDPOINT` and `ASTRA_DB_APPLICATION_TOKEN` credentials,
|
||||
consult the [AstraDB Vector Store Guide](/docs/integrations/vectorstores/astradb).
|
||||
|
||||
:::note
|
||||
For faster initial testing, consider using the **InMemory** Vector Store.
|
||||
:::
|
||||
</div>
|
||||
</TabItem>
|
||||
<TabItem value="cassandra" label="Apache Cassandra">
|
||||
<div style={{ paddingLeft: '30px' }}>
|
||||
Install the `langchain-graph-retriever` package with the `cassandra` extra:
|
||||
|
||||
```bash
|
||||
pip install "langchain-graph-retriever[cassandra]"
|
||||
```
|
||||
|
||||
Then create a vector store and load the test documents:
|
||||
|
||||
```python
|
||||
from langchain_community.vectorstores.cassandra import Cassandra
|
||||
from langchain_graph_retriever.transformers import ShreddingTransformer
|
||||
|
||||
vector_store = Cassandra.from_documents(
|
||||
documents=list(ShreddingTransformer().transform_documents(animals)),
|
||||
embedding=embeddings,
|
||||
table_name="animals",
|
||||
)
|
||||
```
|
||||
|
||||
For help creating a Cassandra connection, consult the
|
||||
[Apache Cassandra Vector Store Guide](/docs/integrations/vectorstores/cassandra#connection-parameters)
|
||||
|
||||
:::note
|
||||
Apache Cassandra doesn't support searching in nested metadata. Because of this
|
||||
it is necessary to use the [`ShreddingTransformer`](https://datastax.github.io/graph-rag/reference/langchain_graph_retriever/transformers/#langchain_graph_retriever.transformers.shredding.ShreddingTransformer)
|
||||
when inserting documents.
|
||||
:::
|
||||
</div>
|
||||
</TabItem>
|
||||
<TabItem value="opensearch" label="OpenSearch">
|
||||
<div style={{ paddingLeft: '30px' }}>
|
||||
Install the `langchain-graph-retriever` package with the `opensearch` extra:
|
||||
|
||||
```bash
|
||||
pip install "langchain-graph-retriever[opensearch]"
|
||||
```
|
||||
|
||||
Then create a vector store and load the test documents:
|
||||
|
||||
```python
|
||||
from langchain_community.vectorstores import OpenSearchVectorSearch
|
||||
|
||||
vector_store = OpenSearchVectorSearch.from_documents(
|
||||
documents=animals,
|
||||
embedding=embeddings,
|
||||
engine="faiss",
|
||||
index_name="animals",
|
||||
opensearch_url=OPEN_SEARCH_URL,
|
||||
bulk_size=500,
|
||||
)
|
||||
```
|
||||
|
||||
For help creating an OpenSearch connection, consult the
|
||||
[OpenSearch Vector Store Guide](/docs/integrations/vectorstores/opensearch).
|
||||
</div>
|
||||
</TabItem>
|
||||
<TabItem value="chroma" label="Chroma">
|
||||
<div style={{ paddingLeft: '30px' }}>
|
||||
Install the `langchain-graph-retriever` package with the `chroma` extra:
|
||||
|
||||
```bash
|
||||
pip install "langchain-graph-retriever[chroma]"
|
||||
```
|
||||
|
||||
Then create a vector store and load the test documents:
|
||||
|
||||
```python
|
||||
from langchain_chroma.vectorstores import Chroma
|
||||
from langchain_graph_retriever.transformers import ShreddingTransformer
|
||||
|
||||
vector_store = Chroma.from_documents(
|
||||
documents=list(ShreddingTransformer().transform_documents(animals)),
|
||||
embedding=embeddings,
|
||||
collection_name="animals",
|
||||
)
|
||||
```
|
||||
|
||||
For help creating a Chroma connection, consult the
|
||||
[Chroma Vector Store Guide](/docs/integrations/vectorstores/chroma).
|
||||
|
||||
:::note
|
||||
Chroma doesn't support searching in nested metadata. Because of this
|
||||
it is necessary to use the [`ShreddingTransformer`](https://datastax.github.io/graph-rag/reference/langchain_graph_retriever/transformers/#langchain_graph_retriever.transformers.shredding.ShreddingTransformer)
|
||||
when inserting documents.
|
||||
:::
|
||||
</div>
|
||||
</TabItem>
|
||||
<TabItem value="in-memory" label="InMemory" default>
|
||||
<div style={{ paddingLeft: '30px' }}>
|
||||
Install the `langchain-graph-retriever` package:
|
||||
|
||||
```bash
|
||||
pip install "langchain-graph-retriever"
|
||||
```
|
||||
|
||||
Then create a vector store and load the test documents:
|
||||
|
||||
```python
|
||||
from langchain_core.vectorstores import InMemoryVectorStore
|
||||
|
||||
vector_store = InMemoryVectorStore.from_documents(
|
||||
documents=animals,
|
||||
embedding=embeddings,
|
||||
)
|
||||
```
|
||||
|
||||
:::tip
|
||||
Using the `InMemoryVectorStore` is the fastest way to get started with Graph RAG
|
||||
but it isn't recommended for production use. Instead, it is recommended to use
|
||||
**AstraDB** or **OpenSearch**.
|
||||
:::
|
||||
</div>
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
### Graph Traversal
|
||||
|
||||
This graph retriever starts with a single animal that best matches the query, then
|
||||
traverses to other animals sharing the same `habitat` and/or `origin`.
|
||||
|
||||
```python
|
||||
from graph_retriever.strategies import Eager
|
||||
from langchain_graph_retriever import GraphRetriever
|
||||
|
||||
traversal_retriever = GraphRetriever(
|
||||
store = vector_store,
|
||||
edges = [("habitat", "habitat"), ("origin", "origin")],
|
||||
strategy = Eager(k=5, start_k=1, max_depth=2),
|
||||
)
|
||||
```
|
||||
|
||||
The above creates a graph traversing retriever that starts with the nearest
|
||||
animal (`start_k=1`), retrieves 5 documents (`k=5`) and limits the search to documents
|
||||
that are at most 2 steps away from the first animal (`max_depth=2`).
|
||||
|
||||
The `edges` define how metadata values can be used for traversal. In this case, every
|
||||
animal is connected to other animals with the same `habitat` and/or `origin`.
|
||||
|
||||
```python
|
||||
results = traversal_retriever.invoke("what animals could be found near a capybara?")
|
||||
|
||||
for doc in results:
|
||||
print(f"{doc.id}: {doc.page_content}")
|
||||
```
|
||||
|
||||
```output
|
||||
capybara: capybaras are the largest rodents in the world and are highly social animals.
|
||||
heron: herons are wading birds known for their long legs and necks, often seen near water.
|
||||
crocodile: crocodiles are large reptiles with powerful jaws and a long lifespan, often living over 70 years.
|
||||
frog: frogs are amphibians known for their jumping ability and croaking sounds.
|
||||
duck: ducks are waterfowl birds known for their webbed feet and quacking sounds.
|
||||
```
|
||||
|
||||
Graph traversal improves retrieval quality by leveraging structured relationships in
|
||||
the data. Unlike standard similarity search (see below), it provides a clear,
|
||||
explainable rationale for why documents are selected.
|
||||
|
||||
In this case, the retrieved documents (for example `capybara`, `heron`, `frog`, and `crocodile`) all
|
||||
share the same `habitat=wetlands`, as defined by their metadata. This should increase
|
||||
Document Relevance and the quality of the answer from the LLM.
|
||||
|
||||
### Comparison to Standard Retrieval
|
||||
|
||||
When `max_depth=0`, the graph traversing retriever behaves like a standard retriever:
|
||||
|
||||
```python
|
||||
standard_retriever = GraphRetriever(
|
||||
store = vector_store,
|
||||
edges = [("habitat", "habitat"), ("origin", "origin")],
|
||||
strategy = Eager(k=5, start_k=5, max_depth=0),
|
||||
)
|
||||
```
|
||||
|
||||
This creates a retriever that starts with the nearest 5 animals (`start_k=5`),
|
||||
and returns them without any traversal (`max_depth=0`). The edge definitions
|
||||
are ignored in this case.
|
||||
|
||||
This is essentially the same as:
|
||||
|
||||
```python
|
||||
standard_retriever = vector_store.as_retriever(search_kwargs={"k":5})
|
||||
```
|
||||
|
||||
For either case, invoking the retriever returns:
|
||||
|
||||
```python
|
||||
results = standard_retriever.invoke("what animals could be found near a capybara?")
|
||||
|
||||
for doc in results:
|
||||
print(f"{doc.id}: {doc.page_content}")
|
||||
```
|
||||
|
||||
```output
|
||||
capybara: capybaras are the largest rodents in the world and are highly social animals.
|
||||
iguana: iguanas are large herbivorous lizards often found basking in trees and near water.
|
||||
guinea pig: guinea pigs are small rodents often kept as pets due to their gentle and social nature.
|
||||
hippopotamus: hippopotamuses are large semi-aquatic mammals known for their massive size and territorial behavior.
|
||||
boar: boars are wild relatives of pigs, known for their tough hides and tusks.
|
||||
```
|
||||
|
||||
These documents are joined based on similarity alone. Any structural data that existed
|
||||
in the store is ignored. As compared to graph retrieval, this can decrease Document
|
||||
Relevance because the returned results have a lower chance of being helpful to answer
|
||||
the query.
|
||||
|
||||
## Usage
|
||||
|
||||
Following the examples above, `.invoke` is used to initiate retrieval on a query.
|
||||
|
||||
## Use within a chain
|
||||
|
||||
Like other retrievers, `GraphRetriever` can be incorporated into LLM applications
|
||||
via [chains](/docs/how_to/sequence/).
|
||||
|
||||
<ChatModelTabs customVarName="llm" />
|
||||
|
||||
```python
|
||||
from langchain_core.output_parsers import StrOutputParser
|
||||
from langchain_core.prompts import ChatPromptTemplate
|
||||
from langchain_core.runnables import RunnablePassthrough
|
||||
|
||||
prompt = ChatPromptTemplate.from_template(
|
||||
"""Answer the question based only on the context provided.
|
||||
|
||||
Context: {context}
|
||||
|
||||
Question: {question}"""
|
||||
)
|
||||
|
||||
def format_docs(docs):
|
||||
return "\n\n".join(f"text: {doc.page_content} metadata: {doc.metadata}" for doc in docs)
|
||||
|
||||
chain = (
|
||||
{"context": traversal_retriever | format_docs, "question": RunnablePassthrough()}
|
||||
| prompt
|
||||
| llm
|
||||
| StrOutputParser()
|
||||
)
|
||||
```
|
||||
|
||||
```python
|
||||
chain.invoke("what animals could be found near a capybara?")
|
||||
```
|
||||
|
||||
```output
|
||||
Animals that could be found near a capybara include herons, crocodiles, frogs,
|
||||
and ducks, as they all inhabit wetlands.
|
||||
```
|
||||
|
||||
## API reference
|
||||
|
||||
To explore all available parameters and advanced configurations, refer to the
|
||||
[Graph RAG API reference](https://datastax.github.io/graph-rag/reference/).
|
@ -21,16 +21,16 @@
|
||||
"source": [
|
||||
"# SambaStudioEmbeddings\n",
|
||||
"\n",
|
||||
"This will help you get started with SambaNova's SambaStudio embedding models using LangChain. For detailed documentation on `SambaStudioEmbeddings` features and configuration options, please refer to the [API reference](https://python.langchain.com/api_reference/sambanova/embeddings/langchain_sambanova.embeddingsSambaStudioEmbeddings.html).\n",
|
||||
"This will help you get started with SambaNova's SambaStudio embedding models using LangChain. For detailed documentation on `SambaStudioEmbeddings` features and configuration options, please refer to the [API reference](https://docs.sambanova.ai/sambastudio/latest/index.html).\n",
|
||||
"\n",
|
||||
"**[SambaNova](https://sambanova.ai/)'s** [Sambastudio](https://sambanova.ai/technology/full-stack-ai-platform) is a platform for running your own open-source models\n",
|
||||
"**[SambaNova](https://sambanova.ai/)'s** [SambaStudio](https://sambanova.ai/technology/full-stack-ai-platform) is a platform for running your own open-source models\n",
|
||||
"\n",
|
||||
"## Overview\n",
|
||||
"### Integration details\n",
|
||||
"\n",
|
||||
"| Provider | Package |\n",
|
||||
"|:--------:|:-------:|\n",
|
||||
"| [SambaNova](/docs/integrations/providers/sambanova/) | [langchain-sambanova](https://python.langchain.com/api_reference/langchain_sambanova/embeddings/langchain_sambanova.embeddings.SambaStudioEmbeddings.html) |\n",
|
||||
"| [SambaNova](/docs/integrations/providers/sambanova/) | [langchain-sambanova](https://python.langchain.com/docs/integrations/providers/sambanova/) |\n",
|
||||
"\n",
|
||||
"## Setup\n",
|
||||
"\n",
|
||||
@ -227,7 +227,7 @@
|
||||
"source": [
|
||||
"## API Reference\n",
|
||||
"\n",
|
||||
"For detailed documentation on `SambaNovaEmbeddings` features and configuration options, please refer to the [API reference](https://python.langchain.com/api_reference/langchain_sambanova/embeddings/langchain_sambanova.embeddings.SambaStudioEmbeddings.html).\n"
|
||||
"For detailed documentation on `SambaStudio` features and configuration options, please refer to the [API reference](https://docs.sambanova.ai/sambastudio/latest/api-ref-landing.html).\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
256
docs/docs/integrations/tools/apify_actors.ipynb
Normal file
@ -0,0 +1,256 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "_9MNj58sIkGN"
|
||||
},
|
||||
"source": [
|
||||
"# Apify Actor\n",
|
||||
"\n",
|
||||
"## Overview\n",
|
||||
"\n",
|
||||
">[Apify Actors](https://docs.apify.com/platform/actors) are cloud programs designed for a wide range of web scraping, crawling, and data extraction tasks. These actors facilitate automated data gathering from the web, enabling users to extract, process, and store information efficiently. Actors can be used to perform tasks like scraping e-commerce sites for product details, monitoring price changes, or gathering search engine results. They integrate seamlessly with [Apify Datasets](https://docs.apify.com/platform/storage/dataset), allowing the structured data collected by actors to be stored, managed, and exported in formats like JSON, CSV, or Excel for further analysis or use.\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "OHLF9t9v9HCb"
|
||||
},
|
||||
"source": [
|
||||
"## Setup\n",
|
||||
"\n",
|
||||
"This integration lives in the [langchain-apify](https://pypi.org/project/langchain-apify/) package. The package can be installed using pip.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "4DdGmBn5IbXz"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%pip install langchain-apify"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "rEAwonXqwggR"
|
||||
},
|
||||
"source": [
|
||||
"### Prerequisites\n",
|
||||
"\n",
|
||||
"- **Apify account**: Register your free Apify account [here](https://console.apify.com/sign-up).\n",
|
||||
"- **Apify API token**: Learn how to get your API token in the [Apify documentation](https://docs.apify.com/platform/integrations/api)."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "9nJOl4MBMkcR"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os\n",
|
||||
"\n",
|
||||
"os.environ[\"APIFY_API_TOKEN\"] = \"your-apify-api-token\"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = \"your-openai-api-key\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "UfoQxAlCxR9q"
|
||||
},
|
||||
"source": [
|
||||
"## Instantiation"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "qG9KtXtLM8i7"
|
||||
},
|
||||
"source": [
|
||||
"Here we instantiate the `ApifyActorsTool` to be able to call [RAG Web Browser](https://apify.com/apify/rag-web-browser) Apify Actor. This Actor provides web browsing functionality for AI and LLM applications, similar to the web browsing feature in ChatGPT. Any Actor from the [Apify Store](https://apify.com/store) can be used in this way."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 43,
|
||||
"metadata": {
|
||||
"id": "cyxeTlPnM4Ya"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_apify import ApifyActorsTool\n",
|
||||
"\n",
|
||||
"tool = ApifyActorsTool(\"apify/rag-web-browser\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "fGDLvDCqyKWO"
|
||||
},
|
||||
"source": [
|
||||
"## Invocation\n",
|
||||
"\n",
|
||||
"The `ApifyActorsTool` takes a single argument, which is `run_input` - a dictionary that is passed as a run input to the Actor. Run input schema documentation can be found in the input section of the Actor details page. See [RAG Web Browser input schema](https://apify.com/apify/rag-web-browser/input-schema).\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "nTWy6Hx1yk04"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"tool.invoke({\"run_input\": {\"query\": \"what is apify?\", \"maxResults\": 2}})"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "kQsa27hoO58S"
|
||||
},
|
||||
"source": [
|
||||
"## Chaining\n",
|
||||
"\n",
|
||||
"We can provide the created tool to an [agent](https://python.langchain.com/docs/tutorials/agents/). When asked to search for information, the agent will call the Apify Actor, which will search the web, and then retrieve the search results.\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "YySvLskW72Y8"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%pip install langgraph langchain-openai"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 44,
|
||||
"metadata": {
|
||||
"id": "QEDz07btO5Gi"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.messages import ToolMessage\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from langgraph.prebuilt import create_react_agent\n",
|
||||
"\n",
|
||||
"model = ChatOpenAI(model=\"gpt-4o\")\n",
|
||||
"tools = [tool]\n",
|
||||
"graph = create_react_agent(model, tools=tools)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 45,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
},
|
||||
"id": "XS1GEyNkQxGu",
|
||||
"outputId": "195273d7-034c-425b-f3f9-95c0a9fb0c9e"
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"================================\u001b[1m Human Message \u001b[0m=================================\n",
|
||||
"\n",
|
||||
"search for what is Apify\n",
|
||||
"==================================\u001b[1m Ai Message \u001b[0m==================================\n",
|
||||
"Tool Calls:\n",
|
||||
" apify_actor_apify_rag-web-browser (call_27mjHLzDzwa5ZaHWCMH510lm)\n",
|
||||
" Call ID: call_27mjHLzDzwa5ZaHWCMH510lm\n",
|
||||
" Args:\n",
|
||||
" run_input: {\"run_input\":{\"query\":\"Apify\",\"maxResults\":3,\"outputFormats\":[\"markdown\"]}}\n",
|
||||
"==================================\u001b[1m Ai Message \u001b[0m==================================\n",
|
||||
"\n",
|
||||
"Apify is a comprehensive platform for web scraping, browser automation, and data extraction. It offers a wide array of tools and services that cater to developers and businesses looking to extract data from websites efficiently and effectively. Here's an overview of Apify:\n",
|
||||
"\n",
|
||||
"1. **Ecosystem and Tools**:\n",
|
||||
" - Apify provides an ecosystem where developers can build, deploy, and publish data extraction and web automation tools called Actors.\n",
|
||||
" - The platform supports various use cases such as extracting data from social media platforms, conducting automated browser-based tasks, and more.\n",
|
||||
"\n",
|
||||
"2. **Offerings**:\n",
|
||||
" - Apify offers over 3,000 ready-made scraping tools and code templates.\n",
|
||||
" - Users can also build custom solutions or hire Apify's professional services for more tailored data extraction needs.\n",
|
||||
"\n",
|
||||
"3. **Technology and Integration**:\n",
|
||||
" - The platform supports integration with popular tools and services like Zapier, GitHub, Google Sheets, Pinecone, and more.\n",
|
||||
" - Apify supports open-source tools and technologies such as JavaScript, Python, Puppeteer, Playwright, Selenium, and its own Crawlee library for web crawling and browser automation.\n",
|
||||
"\n",
|
||||
"4. **Community and Learning**:\n",
|
||||
" - Apify hosts a community on Discord where developers can get help and share expertise.\n",
|
||||
" - It offers educational resources through the Web Scraping Academy to help users become proficient in data scraping and automation.\n",
|
||||
"\n",
|
||||
"5. **Enterprise Solutions**:\n",
|
||||
" - Apify provides enterprise-grade web data extraction solutions with high reliability, 99.95% uptime, and compliance with SOC2, GDPR, and CCPA standards.\n",
|
||||
"\n",
|
||||
"For more information, you can visit [Apify's official website](https://apify.com/) or their [GitHub page](https://github.com/apify) which contains their code repositories and further details about their projects.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"inputs = {\"messages\": [(\"user\", \"search for what is Apify\")]}\n",
|
||||
"for s in graph.stream(inputs, stream_mode=\"values\"):\n",
|
||||
" message = s[\"messages\"][-1]\n",
|
||||
" # skip tool messages\n",
|
||||
" if isinstance(message, ToolMessage):\n",
|
||||
" continue\n",
|
||||
" message.pretty_print()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "WYXuQIQx8AvG"
|
||||
},
|
||||
"source": [
|
||||
"## API reference\n",
|
||||
"\n",
|
||||
"For more information on how to use this integration, see the [git repository](https://github.com/apify/langchain-apify) or the [Apify integration documentation](https://docs.apify.com/platform/integrations/langgraph)."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "f1NnMik78oib"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"provenance": [],
|
||||
"toc_visible": true
|
||||
},
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"name": "python"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
@ -66,21 +66,20 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from databricks.sdk import WorkspaceClient\n",
|
||||
"from langchain_community.tools.databricks import UCFunctionToolkit\n",
|
||||
"from databricks_langchain.uc_ai import (\n",
|
||||
" DatabricksFunctionClient,\n",
|
||||
" UCFunctionToolkit,\n",
|
||||
" set_uc_function_client,\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"tools = (\n",
|
||||
" UCFunctionToolkit(\n",
|
||||
" # You can find the SQL warehouse ID in its UI after creation.\n",
|
||||
" warehouse_id=\"xxxx123456789\"\n",
|
||||
" )\n",
|
||||
" .include(\n",
|
||||
" # Include functions as tools using their qualified names.\n",
|
||||
" # You can use \"{catalog_name}.{schema_name}.*\" to get all functions in a schema.\n",
|
||||
" \"main.tools.python_exec\",\n",
|
||||
" )\n",
|
||||
" .get_tools()\n",
|
||||
")"
|
||||
"client = DatabricksFunctionClient()\n",
|
||||
"set_uc_function_client(client)\n",
|
||||
"\n",
|
||||
"tools = UCFunctionToolkit(\n",
|
||||
" # Include functions as tools using their qualified names.\n",
|
||||
" # You can use \"{catalog_name}.{schema_name}.*\" to get all functions in a schema.\n",
|
||||
" function_names=[\"main.tools.python_exec\"]\n",
|
||||
").tools"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
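To show how the migrated toolkit above might be wired into an agent, here is a hedged sketch that reuses the new `databricks_langchain` API from this cell together with the same LangGraph pattern the Apify notebook above uses. The model name and the question are placeholders, and it assumes `databricks-langchain`, `langgraph`, and `langchain-openai` are installed and that the UC function `main.tools.python_exec` exists in the workspace.

```python
# Hedged sketch only; mirrors the updated cell above plus a create_react_agent wrapper.
from databricks_langchain.uc_ai import (
    DatabricksFunctionClient,
    UCFunctionToolkit,
    set_uc_function_client,
)
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent

client = DatabricksFunctionClient()
set_uc_function_client(client)

tools = UCFunctionToolkit(function_names=["main.tools.python_exec"]).tools

model = ChatOpenAI(model="gpt-4o")  # placeholder model choice
agent = create_react_agent(model, tools=tools)
agent.invoke({"messages": [("user", "Use python_exec to compute 36939 * 8922.4")]})
```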
@ -5,7 +5,7 @@
"id": "a991a6f8-1897-4f49-a191-ae3bdaeda856",
"metadata": {},
"source": [
"# Eleven Labs Text2Speech\n",
"# ElevenLabs Text2Speech\n",
"\n",
"This notebook shows how to interact with the `ElevenLabs API` to achieve text-to-speech capabilities."
]
@ -37,7 +37,7 @@
"source": [
"import os\n",
"\n",
"os.environ[\"ELEVEN_API_KEY\"] = \"\""
"os.environ[\"ELEVENLABS_API_KEY\"] = \"\""
]
},
{
@ -64,7 +64,10 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import getpass\n",
|
||||
"import os"
|
||||
"import os\n",
|
||||
"\n",
|
||||
"if not os.environ.get(\"JINA_API_KEY\"):\n",
|
||||
" os.environ[\"JINA_API_KEY\"] = getpass.getpass(\"Jina API key:\\n\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@ -36,6 +36,7 @@ def _reorder_keys(p):
|
||||
"js",
|
||||
"downloads",
|
||||
"downloads_updated_at",
|
||||
"disabled",
|
||||
]
|
||||
if set(keys) - set(key_order):
|
||||
raise ValueError(f"Unexpected keys: {set(keys) - set(key_order)}")
|
||||
|
@ -191,6 +191,19 @@ ${llmVarName} = AzureChatOpenAI(
|
||||
apiKeyName: "TOGETHER_API_KEY",
|
||||
packageName: "langchain[together]",
|
||||
},
|
||||
{
|
||||
value: "ibm",
|
||||
label: "IBM",
|
||||
text: `from langchain_ibm import ChatWatsonx
|
||||
|
||||
${llmVarName} = ChatWatsonx(
|
||||
model_id="ibm/granite-34b-code-instruct",
|
||||
url="https://us-south.ml.cloud.ibm.com",
|
||||
project_id="<WATSONX PROJECT_ID>"
|
||||
)`,
|
||||
apiKeyName: "WATSONX_APIKEY",
|
||||
packageName: "langchain-ibm",
|
||||
},
|
||||
{
|
||||
value: "databricks",
|
||||
label: "Databricks",
|
||||
|
@ -27,6 +27,8 @@ export default function EmbeddingTabs(props) {
|
||||
hideNvidia,
|
||||
voyageaiParams,
|
||||
hideVoyageai,
|
||||
ibmParams,
|
||||
hideIBM,
|
||||
fakeEmbeddingParams,
|
||||
hideFakeEmbedding,
|
||||
customVarName,
|
||||
@ -45,6 +47,8 @@ export default function EmbeddingTabs(props) {
|
||||
const nomicsParamsOrDefault = nomicParams ?? `model="nomic-embed-text-v1.5"`;
|
||||
const nvidiaParamsOrDefault = nvidiaParams ?? `model="NV-Embed-QA"`;
|
||||
const voyageaiParamsOrDefault = voyageaiParams ?? `model="voyage-3"`;
|
||||
const ibmParamsOrDefault = ibmParams ??
|
||||
`\n model_id="ibm/slate-125m-english-rtrvr",\n url="https://us-south.ml.cloud.ibm.com",\n project_id="<WATSONX PROJECT_ID>",\n`;
|
||||
const fakeEmbeddingParamsOrDefault = fakeEmbeddingParams ?? `size=4096`;
|
||||
|
||||
const embeddingVarName = customVarName ?? "embeddings";
|
||||
@ -149,6 +153,15 @@ export default function EmbeddingTabs(props) {
|
||||
default: false,
|
||||
shouldHide: hideVoyageai,
|
||||
},
|
||||
{
|
||||
value: "IBM",
|
||||
label: "IBM",
|
||||
text: `from langchain_ibm import WatsonxEmbeddings\n\n${embeddingVarName} = WatsonxEmbeddings(${ibmParamsOrDefault})`,
|
||||
apiKeyName: "WATSONX_APIKEY",
|
||||
packageName: "langchain-ibm",
|
||||
default: false,
|
||||
shouldHide: hideIBM,
|
||||
},
|
||||
{
|
||||
value: "Fake",
|
||||
label: "Fake",
|
||||
|
BIN
docs/static/img/ApifyActors.png
vendored
Binary file not shown.
Before Width: | Height: | Size: 147 KiB After Width: | Height: | Size: 212 KiB
@ -3,10 +3,8 @@ requires = ["pdm-backend"]
|
||||
build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = [
|
||||
{name = "Erick Friis", email = "erick@langchain.dev"},
|
||||
]
|
||||
license = {text = "MIT"}
|
||||
authors = [{ name = "Erick Friis", email = "erick@langchain.dev" }]
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"typer[all]<1.0.0,>=0.9.0",
|
||||
@ -31,33 +29,25 @@ langchain = "langchain_cli.cli:app"
|
||||
langchain-cli = "langchain_cli.cli:app"
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"pytest<8.0.0,>=7.4.2",
|
||||
"pytest-watch<5.0.0,>=4.2.0",
|
||||
]
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
"mypy<2.0.0,>=1.13.0",
|
||||
]
|
||||
test = [
|
||||
"langchain @ file:///${PROJECT_ROOT}/../langchain",
|
||||
]
|
||||
typing = [
|
||||
"langchain @ file:///${PROJECT_ROOT}/../langchain",
|
||||
]
|
||||
dev = ["pytest<8.0.0,>=7.4.2", "pytest-watch<5.0.0,>=4.2.0"]
|
||||
lint = ["ruff<1.0,>=0.5", "mypy<2.0.0,>=1.13.0"]
|
||||
test = ["langchain"]
|
||||
typing = ["langchain"]
|
||||
test_integration = []
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain = { path = "../langchain", editable = true }
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = [
|
||||
"E", # pycodestyle
|
||||
"F", # pyflakes
|
||||
"I", # isort
|
||||
"T201", # print
|
||||
"E", # pycodestyle
|
||||
"F", # pyflakes
|
||||
"I", # isort
|
||||
"T201", # print
|
||||
]
|
||||
|
||||
[tool.mypy]
|
||||
exclude = [
|
||||
"langchain_cli/integration_template",
|
||||
"langchain_cli/package_template",
|
||||
"langchain_cli/integration_template",
|
||||
"langchain_cli/package_template",
|
||||
]
|
||||
|
127
libs/cli/uv.lock
@ -620,8 +620,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.3.18rc1"
|
||||
source = { directory = "../langchain" }
|
||||
version = "0.3.18"
|
||||
source = { editable = "../langchain" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11'" },
|
||||
@ -645,7 +645,7 @@ requires-dist = [
|
||||
{ name = "langchain-aws", marker = "extra == 'aws'" },
|
||||
{ name = "langchain-cohere", marker = "extra == 'cohere'" },
|
||||
{ name = "langchain-community", marker = "extra == 'community'" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-deepseek", marker = "extra == 'deepseek'" },
|
||||
{ name = "langchain-fireworks", marker = "extra == 'fireworks'" },
|
||||
{ name = "langchain-google-genai", marker = "extra == 'google-genai'" },
|
||||
@ -654,8 +654,8 @@ requires-dist = [
|
||||
{ name = "langchain-huggingface", marker = "extra == 'huggingface'" },
|
||||
{ name = "langchain-mistralai", marker = "extra == 'mistralai'" },
|
||||
{ name = "langchain-ollama", marker = "extra == 'ollama'" },
|
||||
{ name = "langchain-openai", marker = "extra == 'openai'" },
|
||||
{ name = "langchain-text-splitters", specifier = ">=0.3.3,<1.0.0" },
|
||||
{ name = "langchain-openai", marker = "extra == 'openai'", editable = "../partners/openai" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "langchain-together", marker = "extra == 'together'" },
|
||||
{ name = "langsmith", specifier = ">=0.1.17,<0.4" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.26.4,<2" },
|
||||
@ -671,8 +671,8 @@ requires-dist = [
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "playwright", specifier = ">=1.28.0,<2.0.0" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
@ -682,14 +682,15 @@ lint = [
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "blockbuster", specifier = ">=1.5.14,<1.6" },
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "duckdb-engine", specifier = ">=0.9.2,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-openai", directory = "../partners/openai" },
|
||||
{ name = "langchain-tests", directory = "../standard-tests" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-openai", editable = "../partners/openai" },
|
||||
{ name = "langchain-tests", editable = "../standard-tests" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "lark", specifier = ">=1.1.5,<2.0.0" },
|
||||
{ name = "packaging", specifier = ">=24.2" },
|
||||
{ name = "pandas", specifier = ">=2.0.0,<3.0.0" },
|
||||
@ -708,8 +709,8 @@ test = [
|
||||
]
|
||||
test-integration = [
|
||||
{ name = "cassio", specifier = ">=0.1.0,<1.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "langchainhub", specifier = ">=0.1.16,<1.0.0" },
|
||||
{ name = "pytest-vcr", specifier = ">=1.0.2,<2.0.0" },
|
||||
{ name = "python-dotenv", specifier = ">=1.0.0,<2.0.0" },
|
||||
@ -717,8 +718,8 @@ test-integration = [
|
||||
{ name = "wrapt", specifier = ">=1.15.0,<2.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "mypy-protobuf", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "types-chardet", specifier = ">=5.0.4.6,<6.0.0.0" },
|
||||
@ -777,14 +778,14 @@ lint = [
|
||||
{ name = "mypy", specifier = ">=1.13.0,<2.0.0" },
|
||||
{ name = "ruff", specifier = ">=0.5,<1.0" },
|
||||
]
|
||||
test = [{ name = "langchain", directory = "../langchain" }]
|
||||
test = [{ name = "langchain", editable = "../langchain" }]
|
||||
test-integration = []
|
||||
typing = [{ name = "langchain", directory = "../langchain" }]
|
||||
typing = [{ name = "langchain", editable = "../langchain" }]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.33"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -794,21 +795,93 @@ dependencies = [
|
||||
{ name = "tenacity" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/57/b3/426268e07273c395affc6dd02cdf89803888121cfc59ce60922f363aeff8/langchain_core-0.3.33.tar.gz", hash = "sha256:b5dd93a4e7f8198d2fc6048723b0bfecf7aaf128b0d268cbac19c34c1579b953", size = 331492 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/98/78/463bc92174555cc04b3e234faa169bb8b58f36fff77892d7b8ae2b4f58e4/langchain_core-0.3.33-py3-none-any.whl", hash = "sha256:269706408a2223f863ff1f9616f31903a5712403199d828b50aadbc4c28b553a", size = 412656 },
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "jsonpatch", specifier = ">=1.33,<2.0" },
|
||||
{ name = "langsmith", specifier = ">=0.1.125,<0.4" },
|
||||
{ name = "packaging", specifier = ">=23.2,<25" },
|
||||
{ name = "pydantic", marker = "python_full_version < '3.12.4'", specifier = ">=2.5.2,<3.0.0" },
|
||||
{ name = "pydantic", marker = "python_full_version >= '3.12.4'", specifier = ">=2.7.4,<3.0.0" },
|
||||
{ name = "pyyaml", specifier = ">=5.3" },
|
||||
{ name = "tenacity", specifier = ">=8.1.0,!=8.4.0,<10.0.0" },
|
||||
{ name = "typing-extensions", specifier = ">=4.7" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "grandalf", specifier = ">=0.8,<1.0" },
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [
|
||||
{ name = "blockbuster", specifier = "~=1.5.11" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "grandalf", specifier = ">=0.8,<1.0" },
|
||||
{ name = "langchain-tests", directory = "../standard-tests" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<3" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
{ name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
|
||||
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
|
||||
{ name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" },
|
||||
{ name = "responses", specifier = ">=0.25.0,<1.0.0" },
|
||||
{ name = "syrupy", specifier = ">=4.0.2,<5.0.0" },
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.10,<1.11" },
|
||||
{ name = "types-jinja2", specifier = ">=2.11.9,<3.0.0" },
|
||||
{ name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
|
||||
{ name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-text-splitters"
|
||||
version = "0.3.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
version = "0.3.6"
|
||||
source = { editable = "../text-splitters" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/10/35/a6f8d6b1bb0e6e8c00b49bce4d1a115f8b68368b1899f65bb34dbbb44160/langchain_text_splitters-0.3.5.tar.gz", hash = "sha256:11cb7ca3694e5bdd342bc16d3875b7f7381651d4a53cbb91d34f22412ae16443", size = 26318 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/83/f8081c3bea416bd9d9f0c26af795c74f42c24f9ad3c4fbf361b7d69de134/langchain_text_splitters-0.3.5-py3-none-any.whl", hash = "sha256:8c9b059827438c5fa8f327b4df857e307828a5ec815163c9b5c9569a3e82c8ee", size = 31620 },
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "langchain-core", editable = "../core" }]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
]
|
||||
lint = [
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
{ name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
|
||||
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
|
||||
{ name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" },
|
||||
]
|
||||
test-integration = [
|
||||
{ name = "nltk", specifier = ">=3.9.1,<4.0.0" },
|
||||
{ name = "sentence-transformers", marker = "python_full_version < '3.13'", specifier = ">=2.6.0" },
|
||||
{ name = "spacy", marker = "python_full_version < '3.10'", specifier = ">=3.0.0,<3.8.4" },
|
||||
{ name = "spacy", marker = "python_full_version < '3.13'", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "transformers", specifier = ">=4.47.0,<5.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "lxml-stubs", specifier = ">=0.5.1,<1.0.0" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
|
||||
{ name = "types-requests", specifier = ">=2.31.0.20240218,<3.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -557,7 +557,7 @@ _EXTRA_OPTIONAL_TOOLS: Dict[str, Tuple[Callable[[KwArg(Any)], BaseTool], List[st
|
||||
_get_dataforseo_api_search_json,
|
||||
["api_login", "api_password", "aiosession"],
|
||||
),
|
||||
"eleven_labs_text2speech": (_get_eleven_labs_text2speech, ["eleven_api_key"]),
|
||||
"eleven_labs_text2speech": (_get_eleven_labs_text2speech, ["elevenlabs_api_key"]),
|
||||
"google_cloud_texttospeech": (_get_google_cloud_texttospeech, []),
|
||||
"read_file": (_get_file_management_tool, []),
|
||||
"reddit_search": (
|
||||
|
@ -1,11 +1,22 @@
|
||||
from typing import Any, Callable, Dict, List
|
||||
|
||||
from langchain_core._api import deprecated
|
||||
from langchain_core.documents import Document
|
||||
from pydantic import BaseModel, model_validator
|
||||
|
||||
from langchain_community.document_loaders.base import BaseLoader
|
||||
|
||||
|
||||
@deprecated(
|
||||
since="0.3.18",
|
||||
message=(
|
||||
"This class is deprecated and will be removed in a future version. "
|
||||
"You can swap to using the `ApifyDatasetLoader`"
|
||||
" implementation in `langchain_apify` package. "
|
||||
"See <https://github.com/apify/langchain-apify>"
|
||||
),
|
||||
alternative_import="langchain_apify.ApifyDatasetLoader",
|
||||
)
|
||||
class ApifyDatasetLoader(BaseLoader, BaseModel):
|
||||
"""Load datasets from `Apify` web scraping, crawling, and data extraction platform.
|
||||
|
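For readers hitting the deprecation above, a hedged migration sketch, assuming the replacement loader in `langchain-apify` keeps the same constructor as this community class; the dataset id and the item fields ("text", "url") are hypothetical.

```python
# Hedged sketch: assumes `pip install langchain-apify` and APIFY_API_TOKEN in the environment;
# the dataset id and item keys are illustrative only, not taken from this change.
from langchain_apify import ApifyDatasetLoader
from langchain_core.documents import Document

loader = ApifyDatasetLoader(
    dataset_id="your-dataset-id",
    dataset_mapping_function=lambda item: Document(
        page_content=item["text"], metadata={"source": item["url"]}
    ),
)
docs = loader.load()
```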
||||
|
@ -4,6 +4,7 @@ from decimal import Decimal
|
||||
from hashlib import md5
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, Union
|
||||
|
||||
from langchain_core._api import deprecated
|
||||
from langchain_core.tools import BaseTool, StructuredTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import BaseModel, Field, create_model
|
||||
@ -131,6 +132,11 @@ def _get_default_workspace_client() -> Any:
|
||||
return WorkspaceClient()
|
||||
|
||||
|
||||
@deprecated(
|
||||
since="0.3.18",
|
||||
removal="1.0",
|
||||
alternative_import="databricks_langchain.uc_ai.UCFunctionToolkit",
|
||||
)
|
||||
class UCFunctionToolkit(BaseToolkit):
|
||||
warehouse_id: str = Field(
|
||||
description="The ID of a Databricks SQL Warehouse to execute functions."
|
||||
|
@ -4,5 +4,6 @@ from enum import Enum
|
||||
class ElevenLabsModel(str, Enum):
|
||||
"""Models available for Eleven Labs Text2Speech."""
|
||||
|
||||
MULTI_LINGUAL = "eleven_multilingual_v1"
|
||||
MONO_LINGUAL = "eleven_monolingual_v1"
|
||||
MULTI_LINGUAL = "eleven_multilingual_v2"
|
||||
MULTI_LINGUAL_FLASH = "eleven_flash_v2_5"
|
||||
MONO_LINGUAL = "eleven_flash_v2"
|
||||
|
@ -21,24 +21,26 @@ def _import_elevenlabs() -> Any:
|
||||
class ElevenLabsModel(str, Enum):
|
||||
"""Models available for Eleven Labs Text2Speech."""
|
||||
|
||||
MULTI_LINGUAL = "eleven_multilingual_v1"
|
||||
MONO_LINGUAL = "eleven_monolingual_v1"
|
||||
MULTI_LINGUAL = "eleven_multilingual_v2"
|
||||
MULTI_LINGUAL_FLASH = "eleven_flash_v2_5"
|
||||
MONO_LINGUAL = "eleven_flash_v2"
|
||||
|
||||
|
||||
class ElevenLabsText2SpeechTool(BaseTool): # type: ignore[override]
|
||||
"""Tool that queries the Eleven Labs Text2Speech API.
|
||||
|
||||
In order to set this up, follow instructions at:
|
||||
https://docs.elevenlabs.io/welcome/introduction
|
||||
https://elevenlabs.io/docs
|
||||
"""
|
||||
|
||||
model: Union[ElevenLabsModel, str] = ElevenLabsModel.MULTI_LINGUAL
|
||||
voice: str = "JBFqnCBsd6RMkjVDRZzb"
|
||||
|
||||
name: str = "eleven_labs_text2speech"
|
||||
description: str = (
|
||||
"A wrapper around Eleven Labs Text2Speech. "
|
||||
"Useful for when you need to convert text to speech. "
|
||||
"It supports multiple languages, including English, German, Polish, "
|
||||
"It supports more than 30 languages, including English, German, Polish, "
|
||||
"Spanish, Italian, French, Portuguese, and Hindi. "
|
||||
)
|
||||
|
||||
@ -46,7 +48,7 @@ class ElevenLabsText2SpeechTool(BaseTool): # type: ignore[override]
|
||||
@classmethod
|
||||
def validate_environment(cls, values: Dict) -> Any:
|
||||
"""Validate that api key exists in environment."""
|
||||
_ = get_from_dict_or_env(values, "eleven_api_key", "ELEVEN_API_KEY")
|
||||
_ = get_from_dict_or_env(values, "elevenlabs_api_key", "ELEVENLABS_API_KEY")
|
||||
|
||||
return values
|
||||
|
||||
@ -55,10 +57,16 @@ class ElevenLabsText2SpeechTool(BaseTool): # type: ignore[override]
|
||||
) -> str:
|
||||
"""Use the tool."""
|
||||
elevenlabs = _import_elevenlabs()
|
||||
client = elevenlabs.client.ElevenLabs()
|
||||
try:
|
||||
speech = elevenlabs.generate(text=query, model=self.model)
|
||||
speech = client.text_to_speech.convert(
|
||||
text=query,
|
||||
model_id=self.model,
|
||||
voice_id=self.voice,
|
||||
output_format="mp3_44100_128",
|
||||
)
|
||||
with tempfile.NamedTemporaryFile(
|
||||
mode="bx", suffix=".wav", delete=False
|
||||
mode="bx", suffix=".mp3", delete=False
|
||||
) as f:
|
||||
f.write(speech)
|
||||
return f.name
|
||||
@ -77,5 +85,8 @@ class ElevenLabsText2SpeechTool(BaseTool): # type: ignore[override]
|
||||
"""Stream the text as speech as it is generated.
|
||||
Play the text in your speakers."""
|
||||
elevenlabs = _import_elevenlabs()
|
||||
speech_stream = elevenlabs.generate(text=query, model=self.model, stream=True)
|
||||
client = elevenlabs.client.ElevenLabs()
|
||||
speech_stream = client.text_to_speech.convert_as_stream(
|
||||
text=query, model_id=self.model, voice_id=self.voice
|
||||
)
|
||||
elevenlabs.stream(speech_stream)
|
||||
|
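To put the API changes above in context, a hedged usage sketch of the updated tool: it reflects the new defaults shown here (the `eleven_multilingual_v2` model and mp3 output) and assumes the `elevenlabs` package is installed with `ELEVENLABS_API_KEY` set, per the renamed variable; the sample text is a placeholder.

```python
# Hedged sketch, not part of this diff; requires `pip install elevenlabs` and ELEVENLABS_API_KEY.
from langchain_community.tools.eleven_labs.text2speech import ElevenLabsText2SpeechTool

tts = ElevenLabsText2SpeechTool()
speech_file = tts.run("Hello from LangChain!")  # writes a temporary .mp3 and returns its path
print(speech_file)

tts.stream_speech("Hello again from LangChain!")  # streams audio straight to the speakers
```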
@ -30,7 +30,7 @@ class JinaSearch(BaseTool): # type: ignore[override]
|
||||
"each in clean, LLM-friendly text. This way, you can always keep your LLM "
|
||||
"up-to-date, improve its factuality, and reduce hallucinations."
|
||||
)
|
||||
search_wrapper: JinaSearchAPIWrapper = Field(default_factory=JinaSearchAPIWrapper)
|
||||
search_wrapper: JinaSearchAPIWrapper = Field(default_factory=JinaSearchAPIWrapper) # type: ignore[arg-type]
|
||||
|
||||
def _run(
|
||||
self,
|
||||
|
@ -1,5 +1,6 @@
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional
|
||||
|
||||
from langchain_core._api import deprecated
|
||||
from langchain_core.documents import Document
|
||||
from langchain_core.utils import get_from_dict_or_env
|
||||
from pydantic import BaseModel, model_validator
|
||||
@ -8,6 +9,16 @@ if TYPE_CHECKING:
|
||||
from langchain_community.document_loaders import ApifyDatasetLoader
|
||||
|
||||
|
||||
@deprecated(
|
||||
since="0.3.18",
|
||||
message=(
|
||||
"This class is deprecated and will be removed in a future version. "
|
||||
"You can swap to using the `ApifyWrapper`"
|
||||
" implementation in `langchain_apify` package. "
|
||||
"See <https://github.com/apify/langchain-apify>"
|
||||
),
|
||||
alternative_import="langchain_apify.ApifyWrapper",
|
||||
)
|
||||
class ApifyWrapper(BaseModel):
|
||||
"""Wrapper around Apify.
|
||||
To use, you should have the ``apify-client`` python package installed,
|
||||
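A hedged migration sketch for this deprecation as well, assuming the `langchain_apify` replacement keeps the familiar `call_actor` interface; the Actor id, start URL, and dataset item keys are placeholders, not taken from this change.

```python
# Hedged sketch: assumes `pip install langchain-apify` and APIFY_API_TOKEN in the environment.
from langchain_apify import ApifyWrapper
from langchain_core.documents import Document

apify = ApifyWrapper()
loader = apify.call_actor(
    actor_id="apify/website-content-crawler",
    run_input={"startUrls": [{"url": "https://python.langchain.com/"}]},
    dataset_mapping_function=lambda item: Document(
        page_content=item.get("text") or "", metadata={"source": item.get("url")}
    ),
)
docs = loader.load()
```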
|
@ -1,18 +1,34 @@
|
||||
import json
|
||||
from typing import List
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import requests
|
||||
from langchain_core.documents import Document
|
||||
from pydantic import BaseModel
|
||||
from langchain_core.utils import get_from_dict_or_env
|
||||
from pydantic import BaseModel, ConfigDict, SecretStr, model_validator
|
||||
from yarl import URL
|
||||
|
||||
|
||||
class JinaSearchAPIWrapper(BaseModel):
|
||||
"""Wrapper around the Jina search engine."""
|
||||
|
||||
api_key: SecretStr
|
||||
|
||||
base_url: str = "https://s.jina.ai/"
|
||||
"""The base URL for the Jina search engine."""
|
||||
|
||||
model_config = ConfigDict(
|
||||
extra="forbid",
|
||||
)
|
||||
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def validate_environment(cls, values: Dict) -> Any:
|
||||
"""Validate that api key and endpoint exists in environment."""
|
||||
api_key = get_from_dict_or_env(values, "api_key", "JINA_API_KEY")
|
||||
values["api_key"] = api_key
|
||||
|
||||
return values
|
||||
|
||||
def run(self, query: str) -> str:
|
||||
"""Query the Jina search engine and return the results as a JSON string.
|
||||
|
||||
@ -59,6 +75,7 @@ class JinaSearchAPIWrapper(BaseModel):
|
||||
def _search_request(self, query: str) -> List[dict]:
|
||||
headers = {
|
||||
"Accept": "application/json",
|
||||
"Authorization": f"Bearer {self.api_key.get_secret_value()}",
|
||||
}
|
||||
url = str(URL(self.base_url + query))
|
||||
response = requests.get(url, headers=headers)
|
||||
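Given the new required `api_key` field above, a hedged sketch of how the wrapper and tool are now expected to be constructed; the query string is a placeholder.

```python
# Hedged sketch: JINA_API_KEY must now be available, either in the environment (picked up by
# the validator above) or passed explicitly to JinaSearchAPIWrapper.
import os

from langchain_community.tools.jina_search.tool import JinaSearch
from langchain_community.utilities.jina_search import JinaSearchAPIWrapper

os.environ.setdefault("JINA_API_KEY", "your-jina-api-key")

wrapper = JinaSearchAPIWrapper()  # api_key resolved from JINA_API_KEY by the validator
tool = JinaSearch(search_wrapper=wrapper)
print(tool.invoke("What is LangChain?"))
```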
|
@ -139,6 +139,14 @@ class SQLDatabase:
|
||||
return cls(create_engine(database_uri, **_engine_args), **kwargs)
|
||||
|
||||
@classmethod
|
||||
@deprecated(
|
||||
"0.3.18",
|
||||
message="For performing structured retrieval using Databricks SQL, "
|
||||
"see the latest best practices and recommended APIs at "
|
||||
"https://docs.unitycatalog.io/ai/integrations/langchain/ " # noqa: E501
|
||||
"instead",
|
||||
removal="1.0",
|
||||
)
|
||||
def from_databricks(
|
||||
cls,
|
||||
catalog: str,
|
||||
|
@ -51,9 +51,9 @@ test = [
|
||||
"blockbuster<1.6,>=1.5.13",
|
||||
"cffi<1.17.1; python_version < \"3.10\"",
|
||||
"cffi; python_version >= \"3.10\"",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain @ file:///${PROJECT_ROOT}/../langchain",
|
||||
"langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
|
||||
"langchain-core",
|
||||
"langchain",
|
||||
"langchain-tests",
|
||||
"toml>=0.10.2",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
@ -63,11 +63,7 @@ lint = [
|
||||
"cffi<1.17.1; python_version < \"3.10\"",
|
||||
"cffi; python_version >= \"3.10\"",
|
||||
]
|
||||
dev = [
|
||||
"jupyter<2.0.0,>=1.0.0",
|
||||
"setuptools<68.0.0,>=67.6.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
]
|
||||
dev = ["jupyter<2.0.0,>=1.0.0", "setuptools<68.0.0,>=67.6.1", "langchain-core"]
|
||||
typing = [
|
||||
"mypy<2.0,>=1.12",
|
||||
"types-pyyaml<7.0.0.0,>=6.0.12.2",
|
||||
@ -77,11 +73,17 @@ typing = [
|
||||
"types-chardet<6.0.0.0,>=5.0.4.6",
|
||||
"types-redis<5.0.0.0,>=4.3.21.6",
|
||||
"mypy-protobuf<4.0.0,>=3.0.0",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
|
||||
"langchain @ file:///${PROJECT_ROOT}/../langchain",
|
||||
"langchain-core",
|
||||
"langchain-text-splitters",
|
||||
"langchain",
|
||||
]
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../core", editable = true }
|
||||
langchain = { path = "../langchain", editable = true }
|
||||
langchain-tests = { path = "../standard-tests", editable = true }
|
||||
langchain-text-splitters = { path = "../text-splitters", editable = true }
|
||||
|
||||
|
||||
[tool.ruff]
|
||||
target-version = "py39"
|
||||
|
@ -0,0 +1,27 @@
|
||||
import os
|
||||
import unittest
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
from langchain_community.tools.jina_search.tool import JinaSearch
|
||||
from langchain_community.utilities.jina_search import JinaSearchAPIWrapper
|
||||
|
||||
os.environ["JINA_API_KEY"] = "test_key"
|
||||
|
||||
|
||||
class TestJinaSearchTool(unittest.TestCase):
|
||||
@patch(
|
||||
"langchain_community.tools.jina_search.tool.JinaSearch.invoke",
|
||||
return_value="mocked_result",
|
||||
)
|
||||
def test_invoke(self, mock_run: Any) -> None:
|
||||
query = "Test query text"
|
||||
wrapper = JinaSearchAPIWrapper(api_key="test_key") # type: ignore[arg-type]
|
||||
jina_search_tool = JinaSearch(api_wrapper=wrapper) # type: ignore[call-arg]
|
||||
results = jina_search_tool.invoke(query)
|
||||
expected_result = "mocked_result"
|
||||
assert results == expected_result
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
@ -1492,7 +1492,7 @@ wheels = [
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.3.18"
|
||||
source = { directory = "../langchain" }
|
||||
source = { editable = "../langchain" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11'" },
|
||||
@ -1516,7 +1516,7 @@ requires-dist = [
|
||||
{ name = "langchain-aws", marker = "extra == 'aws'" },
|
||||
{ name = "langchain-cohere", marker = "extra == 'cohere'" },
|
||||
{ name = "langchain-community", marker = "extra == 'community'" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-deepseek", marker = "extra == 'deepseek'" },
|
||||
{ name = "langchain-fireworks", marker = "extra == 'fireworks'" },
|
||||
{ name = "langchain-google-genai", marker = "extra == 'google-genai'" },
|
||||
@ -1525,8 +1525,8 @@ requires-dist = [
|
||||
{ name = "langchain-huggingface", marker = "extra == 'huggingface'" },
|
||||
{ name = "langchain-mistralai", marker = "extra == 'mistralai'" },
|
||||
{ name = "langchain-ollama", marker = "extra == 'ollama'" },
|
||||
{ name = "langchain-openai", marker = "extra == 'openai'" },
|
||||
{ name = "langchain-text-splitters", specifier = ">=0.3.6,<1.0.0" },
|
||||
{ name = "langchain-openai", marker = "extra == 'openai'", editable = "../partners/openai" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "langchain-together", marker = "extra == 'together'" },
|
||||
{ name = "langsmith", specifier = ">=0.1.17,<0.4" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.26.4,<2" },
|
||||
@ -1542,8 +1542,8 @@ requires-dist = [
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "playwright", specifier = ">=1.28.0,<2.0.0" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
@ -1553,14 +1553,15 @@ lint = [
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "blockbuster", specifier = ">=1.5.14,<1.6" },
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "duckdb-engine", specifier = ">=0.9.2,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-openai", directory = "../partners/openai" },
|
||||
{ name = "langchain-tests", directory = "../standard-tests" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-openai", editable = "../partners/openai" },
|
||||
{ name = "langchain-tests", editable = "../standard-tests" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "lark", specifier = ">=1.1.5,<2.0.0" },
|
||||
{ name = "packaging", specifier = ">=24.2" },
|
||||
{ name = "pandas", specifier = ">=2.0.0,<3.0.0" },
|
||||
@ -1579,8 +1580,8 @@ test = [
|
||||
]
|
||||
test-integration = [
|
||||
{ name = "cassio", specifier = ">=0.1.0,<1.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "langchainhub", specifier = ">=0.1.16,<1.0.0" },
|
||||
{ name = "pytest-vcr", specifier = ">=1.0.2,<2.0.0" },
|
||||
{ name = "python-dotenv", specifier = ">=1.0.0,<2.0.0" },
|
||||
@ -1588,8 +1589,8 @@ test-integration = [
|
||||
{ name = "wrapt", specifier = ">=1.15.0,<2.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "mypy-protobuf", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "types-chardet", specifier = ">=5.0.4.6,<6.0.0.0" },
|
||||
@ -1681,8 +1682,8 @@ requires-dist = [
|
||||
{ name = "aiohttp", specifier = ">=3.8.3,<4.0.0" },
|
||||
{ name = "dataclasses-json", specifier = ">=0.5.7,<0.7" },
|
||||
{ name = "httpx-sse", specifier = ">=0.4.0,<1.0.0" },
|
||||
{ name = "langchain", specifier = ">=0.3.18,<1.0.0" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain", editable = "../langchain" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langsmith", specifier = ">=0.1.125,<0.4" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.26.4,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
@ -1697,7 +1698,7 @@ requires-dist = [
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
lint = [
|
||||
@ -1711,9 +1712,9 @@ test = [
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "duckdb-engine", specifier = ">=0.13.6,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain", directory = "../langchain" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-tests", directory = "../standard-tests" },
|
||||
{ name = "langchain", editable = "../langchain" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-tests", editable = "../standard-tests" },
|
||||
{ name = "lark", specifier = ">=1.1.5,<2.0.0" },
|
||||
{ name = "pandas", specifier = ">=2.0.0,<3.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.4.4,<8.0.0" },
|
||||
@ -1734,9 +1735,9 @@ test-integration = [
|
||||
{ name = "vcrpy", specifier = ">=6,<7" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain", directory = "../langchain" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain", editable = "../langchain" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.12,<2.0" },
|
||||
{ name = "mypy-protobuf", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "types-chardet", specifier = ">=5.0.4.6,<6.0.0.0" },
|
||||
@ -1749,8 +1750,8 @@ typing = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34"
|
||||
source = { directory = "../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -1808,7 +1809,7 @@ typing = [
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.11"
|
||||
source = { directory = "../standard-tests" }
|
||||
source = { editable = "../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
@ -1823,7 +1824,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -1835,36 +1836,36 @@ requires-dist = [
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", directory = "../core" }]
|
||||
test = [{ name = "langchain-core", editable = "../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-text-splitters"
|
||||
version = "0.3.6"
|
||||
source = { directory = "../text-splitters" }
|
||||
source = { editable = "../text-splitters" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" }]
|
||||
requires-dist = [{ name = "langchain-core", editable = "../core" }]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
]
|
||||
lint = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
|
@ -23,7 +23,7 @@ test_watch:
|
||||
-u LANGCHAIN_API_KEY \
|
||||
-u LANGSMITH_TRACING \
|
||||
-u LANGCHAIN_PROJECT \
|
||||
uv run --group test ptw --snapshot-update --now . --disable-socket --allow-unix-socket -- -vv $(TEST_FILE)
|
||||
uv run --group test ptw --snapshot-update --now . --disable-socket --allow-unix-socket -vv -- $(TEST_FILE)
|
||||
|
||||
test_profile:
|
||||
uv run --group test pytest -vv tests/unit_tests/ --profile-svg
|
||||
|
@ -236,7 +236,10 @@ def _create_message_from_message_type(
|
||||
if tool_call_id is not None:
|
||||
kwargs["tool_call_id"] = tool_call_id
|
||||
if additional_kwargs:
|
||||
if response_metadata := additional_kwargs.pop("response_metadata", None):
|
||||
kwargs["response_metadata"] = response_metadata
|
||||
kwargs["additional_kwargs"] = additional_kwargs # type: ignore[assignment]
|
||||
additional_kwargs.update(additional_kwargs.pop("additional_kwargs", {}))
|
||||
if id is not None:
|
||||
kwargs["id"] = id
|
||||
if tool_calls is not None:
|
||||
@ -258,8 +261,12 @@ def _create_message_from_message_type(
|
||||
else:
|
||||
kwargs["tool_calls"].append(tool_call)
|
||||
if message_type in ("human", "user"):
|
||||
if example := kwargs.get("additional_kwargs", {}).pop("example", False):
|
||||
kwargs["example"] = example
|
||||
message: BaseMessage = HumanMessage(content=content, **kwargs)
|
||||
elif message_type in ("ai", "assistant"):
|
||||
if example := kwargs.get("additional_kwargs", {}).pop("example", False):
|
||||
kwargs["example"] = example
|
||||
message = AIMessage(content=content, **kwargs)
|
||||
elif message_type in ("system", "developer"):
|
||||
if message_type == "developer":
|
||||
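To make the behavior change above concrete, a hedged sketch of the dict-to-message conversion it affects, mirroring the updated unit test later in this diff; the field values are illustrative.

```python
# With this change, "response_metadata" and "example" supplied alongside additional_kwargs
# are lifted onto the resulting message instead of being left nested inside additional_kwargs.
from langchain_core.messages import convert_to_messages

messages = convert_to_messages(
    [
        {
            "type": "human",
            "content": "Now the turn for Larry to ask a question about the book!",
            "additional_kwargs": {"metadata": {"speaker_name": "Presenter"}},
            "response_metadata": {},
            "id": "1",
            "example": False,
        }
    ]
)
print(messages[0].additional_kwargs)  # {'metadata': {'speaker_name': 'Presenter'}}
```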
|
@ -59,6 +59,17 @@ class InMemoryVectorStore(VectorStore):
|
||||
documents = [document_1, document_2, document_3]
|
||||
vector_store.add_documents(documents=documents)
|
||||
|
||||
Inspect documents:
|
||||
.. code-block:: python
|
||||
|
||||
top_n = 10
|
||||
for index, (id, doc) in enumerate(vector_store.store.items()):
|
||||
if index < top_n:
|
||||
# docs have keys 'id', 'vector', 'text', 'metadata'
|
||||
print(f"{id}: {doc['text']}")
|
||||
else:
|
||||
break
|
||||
|
||||
Delete Documents:
|
||||
.. code-block:: python
|
||||
|
||||
|
@ -17,7 +17,7 @@ dependencies = [
|
||||
"pydantic<3.0.0,>=2.7.4; python_full_version >= \"3.12.4\"",
|
||||
]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34"
|
||||
version = "0.3.35"
|
||||
description = "Building applications with LLMs through composability"
|
||||
readme = "README.md"
|
||||
|
||||
|
@ -738,6 +738,15 @@ def test_convert_to_messages() -> None:
|
||||
"artifact": {"foo": 123},
|
||||
},
|
||||
{"role": "remove", "id": "message_to_remove", "content": ""},
|
||||
{
|
||||
"content": "Now the turn for Larry to ask a question about the book!",
|
||||
"additional_kwargs": {"metadata": {"speaker_name": "Presenter"}},
|
||||
"response_metadata": {},
|
||||
"type": "human",
|
||||
"name": None,
|
||||
"id": "1",
|
||||
"example": False,
|
||||
},
|
||||
]
|
||||
)
|
||||
expected = [
|
||||
@ -762,6 +771,13 @@ def test_convert_to_messages() -> None:
|
||||
ToolMessage(tool_call_id="tool_id", content="Hi!"),
|
||||
ToolMessage(tool_call_id="tool_id2", content="Bye!", artifact={"foo": 123}),
|
||||
RemoveMessage(id="message_to_remove"),
|
||||
HumanMessage(
|
||||
content="Now the turn for Larry to ask a question about the book!",
|
||||
additional_kwargs={"metadata": {"speaker_name": "Presenter"}},
|
||||
response_metadata={},
|
||||
id="1",
|
||||
example=False,
|
||||
),
|
||||
]
|
||||
assert expected == actual
|
||||
|
||||
|
@ -935,7 +935,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34"
|
||||
version = "0.3.35"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
@ -1026,7 +1026,7 @@ typing = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.10"
|
||||
version = "0.3.11"
|
||||
source = { directory = "../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
@ -1042,7 +1042,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "." },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -1054,36 +1054,36 @@ requires-dist = [
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", directory = "." }]
|
||||
test = [{ name = "langchain-core", editable = "." }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "." },
|
||||
{ name = "langchain-core", editable = "." },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-text-splitters"
|
||||
version = "0.3.5"
|
||||
version = "0.3.6"
|
||||
source = { directory = "../text-splitters" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" }]
|
||||
requires-dist = [{ name = "langchain-core", editable = "." }]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "." },
|
||||
{ name = "langchain-core", editable = "." },
|
||||
]
|
||||
lint = [
|
||||
{ name = "langchain-core", directory = "." },
|
||||
{ name = "langchain-core", editable = "." },
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "." },
|
||||
{ name = "langchain-core", editable = "." },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
|
@ -69,10 +69,10 @@ test = [
|
||||
"blockbuster<1.6,>=1.5.14",
|
||||
"cffi<1.17.1; python_version < \"3.10\"",
|
||||
"cffi; python_version >= \"3.10\"",
|
||||
"langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
|
||||
"langchain-openai @ file:///${PROJECT_ROOT}/../partners/openai",
|
||||
"langchain-tests",
|
||||
"langchain-core",
|
||||
"langchain-text-splitters",
|
||||
"langchain-openai",
|
||||
"toml>=0.10.2",
|
||||
"packaging>=24.2",
|
||||
]
|
||||
@ -84,8 +84,8 @@ test_integration = [
|
||||
"python-dotenv<2.0.0,>=1.0.0",
|
||||
"cassio<1.0.0,>=0.1.0",
|
||||
"langchainhub<1.0.0,>=0.1.16",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
|
||||
"langchain-core",
|
||||
"langchain-text-splitters",
|
||||
]
|
||||
lint = [
|
||||
"ruff<1.0.0,>=0.9.2",
|
||||
@ -101,18 +101,24 @@ typing = [
|
||||
"types-pytz<2024.0.0.0,>=2023.3.0.0",
|
||||
"types-chardet<6.0.0.0,>=5.0.4.6",
|
||||
"mypy-protobuf<4.0.0,>=3.0.0",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
|
||||
"langchain-core",
|
||||
"langchain-text-splitters",
|
||||
]
|
||||
dev = [
|
||||
"jupyter<2.0.0,>=1.0.0",
|
||||
"playwright<2.0.0,>=1.28.0",
|
||||
"setuptools<68.0.0,>=67.6.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
|
||||
"langchain-core",
|
||||
"langchain-text-splitters",
|
||||
]
|
||||
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../core", editable = true }
|
||||
langchain-tests = { path = "../standard-tests", editable = true }
|
||||
langchain-text-splitters = { path = "../text-splitters", editable = true }
|
||||
langchain-openai = { path = "../partners/openai", editable = true }
|
||||
|
||||
[tool.ruff]
|
||||
target-version = "py39"
|
||||
exclude = ["tests/integration_tests/examples/non-utf8-encoding.py"]
|
||||
|
File diff suppressed because it is too large
@ -5,388 +5,427 @@ packages:
- name: langchain-core
  path: libs/core
  repo: langchain-ai/langchain
  downloads: 27728892
  downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
  downloads: 27722594
  downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
- name: langchain-text-splitters
  path: libs/text-splitters
  repo: langchain-ai/langchain
  downloads: 10343427
  downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
  downloads: 12866727
  downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
- name: langchain
  path: libs/langchain
  repo: langchain-ai/langchain
  downloads: 27515102
  downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
  downloads: 32917727
  downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
- name: langchain-community
  path: libs/community
  repo: langchain-ai/langchain
  downloads: 17505668
  downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
  downloads: 21967466
  downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
- name: langchain-experimental
  path: libs/experimental
  repo: langchain-ai/langchain-experimental
  downloads: 1710421
  downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
  downloads: 1960508
  downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
- name: langchain-cli
  path: libs/cli
  repo: langchain-ai/langchain
  downloads: 55505
  downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
  downloads: 84415
  downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
- name: langchain-ai21
|
||||
path: libs/ai21
|
||||
repo: langchain-ai/langchain-ai21
|
||||
downloads: 3453
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 13100
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-anthropic
|
||||
path: libs/partners/anthropic
|
||||
repo: langchain-ai/langchain
|
||||
js: '@langchain/anthropic'
|
||||
downloads: 1163020
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 1549411
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-chroma
|
||||
path: libs/partners/chroma
|
||||
repo: langchain-ai/langchain
|
||||
downloads: 450092
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 553991
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-exa
|
||||
path: libs/partners/exa
|
||||
repo: langchain-ai/langchain
|
||||
provider_page: exa_search
|
||||
js: '@langchain/exa'
|
||||
downloads: 4560
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 5817
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-fireworks
|
||||
path: libs/partners/fireworks
|
||||
repo: langchain-ai/langchain
|
||||
downloads: 73179
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 264866
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-groq
|
||||
path: libs/partners/groq
|
||||
repo: langchain-ai/langchain
|
||||
js: '@langchain/groq'
|
||||
downloads: 370373
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 452801
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-huggingface
|
||||
path: libs/partners/huggingface
|
||||
repo: langchain-ai/langchain
|
||||
downloads: 375151
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 403346
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-ibm
|
||||
path: libs/ibm
|
||||
repo: langchain-ai/langchain-ibm
|
||||
js: '@langchain/ibm'
|
||||
downloads: 261091
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 95572
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-localai
|
||||
path: libs/localai
|
||||
repo: mkhludnev/langchain-localai
|
||||
downloads: 510
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 306
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-milvus
|
||||
path: libs/milvus
|
||||
repo: langchain-ai/langchain-milvus
|
||||
downloads: 111126
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 162619
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-mistralai
|
||||
path: libs/partners/mistralai
|
||||
repo: langchain-ai/langchain
|
||||
js: '@langchain/mistralai'
|
||||
downloads: 232463
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 315149
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-mongodb
|
||||
path: libs/langchain-mongodb
|
||||
repo: langchain-ai/langchain-mongodb
|
||||
provider_page: mongodb_atlas
|
||||
js: '@langchain/mongodb'
|
||||
downloads: 113328
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 160711
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
disabled: true
|
||||
- name: langchain-nomic
|
||||
path: libs/partners/nomic
|
||||
repo: langchain-ai/langchain
|
||||
js: '@langchain/nomic'
|
||||
downloads: 10175
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 10335
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-openai
|
||||
path: libs/partners/openai
|
||||
repo: langchain-ai/langchain
|
||||
js: '@langchain/openai'
|
||||
downloads: 7994138
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 9823331
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-pinecone
|
||||
path: libs/partners/pinecone
|
||||
repo: langchain-ai/langchain
|
||||
js: '@langchain/pinecone'
|
||||
downloads: 345657
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 393153
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-prompty
|
||||
path: libs/partners/prompty
|
||||
repo: langchain-ai/langchain
|
||||
provider_page: microsoft
|
||||
downloads: 976
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 1216
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-qdrant
|
||||
path: libs/partners/qdrant
|
||||
repo: langchain-ai/langchain
|
||||
js: '@langchain/qdrant'
|
||||
downloads: 77743
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 125551
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-scrapegraph
|
||||
path: .
|
||||
repo: ScrapeGraphAI/langchain-scrapegraph
|
||||
downloads: 907
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 851
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-sema4
|
||||
path: libs/sema4
|
||||
repo: langchain-ai/langchain-sema4
|
||||
provider_page: robocorp
|
||||
downloads: 987
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 1647
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-together
|
||||
path: libs/together
|
||||
repo: langchain-ai/langchain-together
|
||||
downloads: 44887
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 53987
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-upstage
|
||||
path: libs/upstage
|
||||
repo: langchain-ai/langchain-upstage
|
||||
downloads: 20951
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 29553
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-voyageai
|
||||
path: libs/partners/voyageai
|
||||
repo: langchain-ai/langchain
|
||||
downloads: 11253
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 17269
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-aws
|
||||
name_title: AWS
|
||||
path: libs/aws
|
||||
repo: langchain-ai/langchain-aws
|
||||
js: '@langchain/aws'
|
||||
downloads: 1507701
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 2133380
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-astradb
|
||||
path: libs/astradb
|
||||
repo: langchain-ai/langchain-datastax
|
||||
downloads: 64185
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 83037
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-google-genai
|
||||
name_title: Google Generative AI
|
||||
path: libs/genai
|
||||
repo: langchain-ai/langchain-google
|
||||
provider_page: google
|
||||
js: '@langchain/google-genai'
|
||||
downloads: 732265
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 1019707
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-google-vertexai
|
||||
path: libs/vertexai
|
||||
repo: langchain-ai/langchain-google
|
||||
provider_page: google
|
||||
js: '@langchain/google-vertexai'
|
||||
downloads: 7668881
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 13033464
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-google-community
|
||||
path: libs/community
|
||||
repo: langchain-ai/langchain-google
|
||||
provider_page: google
|
||||
downloads: 3055901
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 3787822
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-weaviate
|
||||
path: libs/weaviate
|
||||
repo: langchain-ai/langchain-weaviate
|
||||
js: '@langchain/weaviate'
|
||||
downloads: 26639
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 31199
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-cohere
|
||||
path: libs/cohere
|
||||
repo: langchain-ai/langchain-cohere
|
||||
js: '@langchain/cohere'
|
||||
downloads: 513053
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 653329
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-elasticsearch
|
||||
path: libs/elasticsearch
|
||||
repo: langchain-ai/langchain-elastic
|
||||
downloads: 108874
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 137212
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-nvidia-ai-endpoints
|
||||
path: libs/ai-endpoints
|
||||
repo: langchain-ai/langchain-nvidia
|
||||
provider_page: nvidia
|
||||
downloads: 129677
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 157267
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-postgres
|
||||
path: .
|
||||
repo: langchain-ai/langchain-postgres
|
||||
provider_page: pgvector
|
||||
downloads: 293866
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 320831
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-redis
|
||||
path: libs/redis
|
||||
repo: langchain-ai/langchain-redis
|
||||
js: '@langchain/redis'
|
||||
downloads: 17549
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 22787
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-unstructured
|
||||
path: libs/unstructured
|
||||
repo: langchain-ai/langchain-unstructured
|
||||
downloads: 88721
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 118888
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-azure-dynamic-sessions
|
||||
path: libs/azure-dynamic-sessions
|
||||
repo: langchain-ai/langchain-azure
|
||||
provider_page: microsoft
|
||||
js: '@langchain/azure-dynamic-sessions'
|
||||
downloads: 7285
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 7401
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-sqlserver
|
||||
path: libs/sqlserver
|
||||
repo: langchain-ai/langchain-azure
|
||||
provider_page: microsoft
|
||||
downloads: 1489
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 2298
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-cerebras
|
||||
path: libs/cerebras
|
||||
repo: langchain-ai/langchain-cerebras
|
||||
downloads: 9426
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 26690
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-snowflake
|
||||
path: libs/snowflake
|
||||
repo: langchain-ai/langchain-snowflake
|
||||
downloads: 2374
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 1905
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: databricks-langchain
|
||||
name_title: Databricks
|
||||
path: integrations/langchain
|
||||
repo: databricks/databricks-ai-bridge
|
||||
provider_page: databricks
|
||||
name_title: Databricks
|
||||
downloads: 35495
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 36221
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-couchbase
|
||||
path: .
|
||||
repo: Couchbase-Ecosystem/langchain-couchbase
|
||||
downloads: 347
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 725
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-ollama
|
||||
path: libs/partners/ollama
|
||||
repo: langchain-ai/langchain
|
||||
js: '@langchain/ollama'
|
||||
downloads: 310741
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 623011
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-box
|
||||
path: libs/box
|
||||
repo: box-community/langchain-box
|
||||
downloads: 2749
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 730
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-tests
|
||||
path: libs/standard-tests
|
||||
repo: langchain-ai/langchain
|
||||
downloads: 3691
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 180354
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-neo4j
|
||||
path: libs/neo4j
|
||||
repo: langchain-ai/langchain-neo4j
|
||||
downloads: 8871
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 30320
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-linkup
|
||||
path: .
|
||||
repo: LinkupPlatform/langchain-linkup
|
||||
downloads: 818
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 532
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-yt-dlp
|
||||
path: .
|
||||
repo: aqib0770/langchain-yt-dlp
|
||||
downloads: 776
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 461
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-oceanbase
|
||||
path: .
|
||||
repo: oceanbase/langchain-oceanbase
|
||||
downloads: 322
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 58
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-predictionguard
|
||||
path: .
|
||||
repo: predictionguard/langchain-predictionguard
|
||||
downloads: 156
|
||||
downloads_updated_at: '2024-12-23T20:10:11.816059+00:00'
|
||||
downloads: 422
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-cratedb
|
||||
path: .
|
||||
repo: crate/langchain-cratedb
|
||||
downloads: 362
|
||||
downloads_updated_at: '2024-12-23T20:53:27.001852+00:00'
|
||||
downloads: 417
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-modelscope
|
||||
path: .
|
||||
repo: modelscope/langchain-modelscope
|
||||
downloads: 0
|
||||
downloads: 131
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-falkordb
|
||||
path: .
|
||||
repo: kingtroga/langchain-falkordb
|
||||
downloads: 610
|
||||
downloads_updated_at: '2025-01-02T20:23:02.544257+00:00'
|
||||
downloads: 178
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-dappier
|
||||
path: .
|
||||
repo: DappierAI/langchain-dappier
|
||||
downloads: 0
|
||||
downloads: 353
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-pull-md
|
||||
path: .
|
||||
repo: chigwell/langchain-pull-md
|
||||
downloads: 0
|
||||
downloads: 161
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-kuzu
|
||||
path: .
|
||||
repo: kuzudb/langchain-kuzu
|
||||
downloads: 0
|
||||
downloads: 426
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-docling
|
||||
path: .
|
||||
repo: DS4SD/docling-langchain
|
||||
downloads: 0
|
||||
- name: langchain-lindorm
|
||||
downloads: 6800
|
||||
downloads_updated_at: '2025-02-13T20:29:06.035211+00:00'
|
||||
- name: langchain-lindorm-integration
|
||||
path: .
|
||||
repo: AlwaysBluer/langchain-lindorm-integration
|
||||
downloads: 0
|
||||
provider_page: lindorm
|
||||
downloads: 79
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-hyperbrowser
|
||||
path: .
|
||||
repo: hyperbrowserai/langchain-hyperbrowser
|
||||
downloads: 0
|
||||
downloads: 371
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-fmp-data
|
||||
path: .
|
||||
repo: MehdiZare/langchain-fmp-data
|
||||
downloads: 0
|
||||
downloads: 366
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: tilores-langchain
|
||||
name_title: Tilores
|
||||
path: .
|
||||
repo: tilotech/tilores-langchain
|
||||
provider_page: tilores
|
||||
name_title: Tilores
|
||||
downloads: 0
|
||||
downloads: 121
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-pipeshift
|
||||
path: .
|
||||
repo: pipeshift-org/langchain-pipeshift
|
||||
downloads: 115
|
||||
downloads: 133
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-payman-tool
|
||||
path: .
|
||||
repo: paymanai/langchain-payman-tool
|
||||
downloads: 0
|
||||
downloads_updated_at: "2025-01-22T00:00:00+00:00"
|
||||
downloads: 685
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-sambanova
|
||||
repo: sambanova/langchain-sambanova
|
||||
path: .
|
||||
downloads: 0
|
||||
repo: sambanova/langchain-sambanova
|
||||
downloads: 1313
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-deepseek
|
||||
repo: langchain-ai/langchain
|
||||
path: libs/partners/deepseek
|
||||
downloads: 0
|
||||
js: '@langchain/deepseek'
|
||||
repo: langchain-ai/langchain
|
||||
provider_page: deepseek
|
||||
js: '@langchain/deepseek'
|
||||
downloads: 6871
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-jenkins
|
||||
path: .
|
||||
repo: Amitgb14/langchain_jenkins
|
||||
downloads: 0
|
||||
downloads: 386
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-goodfire
|
||||
path: .
|
||||
repo: keenanpepper/langchain-goodfire
|
||||
downloads: 51
|
||||
downloads_updated_at: '2025-01-30T00:00:00+00:00'
|
||||
downloads: 585
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-nimble
|
||||
path: .
|
||||
repo: Nimbleway/langchain-nimble
|
||||
downloads: 388
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-apify
|
||||
path: .
|
||||
downloads: 0
|
||||
repo: apify/langchain-apify
|
||||
downloads: 443
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langfair
|
||||
repo: cvs-health/langfair
|
||||
name_title: LangFair
|
||||
path: .
|
||||
repo: cvs-health/langfair
|
||||
downloads: 901
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-abso
|
||||
path: .
|
||||
repo: lunary-ai/langchain-abso
|
||||
downloads: 0
|
||||
downloads_updated_at: '2025-02-13T20:30:13.814314+00:00'
|
||||
- name: langchain-graph-retriever
|
||||
name_title: Graph RAG
|
||||
path: packages/langchain-graph-retriever
|
||||
repo: datastax/graph-rag
|
||||
provider_page: graph_rag
|
||||
downloads: 2093
|
||||
downloads_updated_at: '2025-02-13T20:32:23.744801+00:00'
|
||||
- name: langchain-xai
|
||||
path: libs/partners/xai
|
||||
repo: langchain-ai/langchain
|
||||
downloads: 9521
|
||||
downloads_updated_at: '2025-02-13T23:35:48.490391+00:00'
|
||||
|
@ -4,7 +4,7 @@ build-backend = "pdm.backend"

[project]
authors = []
license = {text = "MIT"}
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
    "anthropic<1,>=0.45.0",
@ -32,29 +32,24 @@ test = [
    "defusedxml<1.0.0,>=0.7.1",
    "pytest-timeout<3.0.0,>=2.3.1",
    "pytest-socket<1.0.0,>=0.7.0",
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
    "langchain-tests @ file:///${PROJECT_ROOT}/../../standard-tests",
]
codespell = [
    "codespell<3.0.0,>=2.2.0",
]
lint = [
    "ruff<1.0,>=0.5",
]
dev = [
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
]
test_integration = [
    "requests<3.0.0,>=2.32.3",
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
    "langchain-core",
    "langchain-tests",
]
codespell = ["codespell<3.0.0,>=2.2.0"]
lint = ["ruff<1.0,>=0.5"]
dev = ["langchain-core"]
test_integration = ["requests<3.0.0,>=2.32.3", "langchain-core"]
typing = [
    "mypy<2.0,>=1.10",
    "types-requests<3.0.0.0,>=2.32.0.20241016",
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
    "langchain-core",
]

[tool.uv.sources]
langchain-core = { path = "../../core", editable = true }
langchain-tests = { path = "../../standard-tests", editable = true }

[tool.mypy]
disallow_untyped_defs = "True"
plugins = ['pydantic.mypy']
@ -450,19 +450,19 @@ typing = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "anthropic", specifier = ">=0.45.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "defusedxml", specifier = ">=0.7.1,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
@ -472,19 +472,19 @@ test = [
|
||||
{ name = "syrupy", specifier = ">=4.0.2,<5.0.0" },
|
||||
]
|
||||
test-integration = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "requests", specifier = ">=2.32.3,<3.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-requests", specifier = ">=2.32.0.20241016,<3.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -542,7 +542,7 @@ typing = [
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.11"
|
||||
source = { directory = "../../standard-tests" }
|
||||
source = { editable = "../../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
@ -557,7 +557,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -569,10 +569,10 @@ requires-dist = [
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", directory = "../core" }]
|
||||
test = [{ name = "langchain-core", editable = "../../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
|
@ -547,7 +547,9 @@ class Chroma(VectorStore):
            metadatas = [metadatas[idx] for idx in non_empty_ids]
            texts_with_metadatas = [texts[idx] for idx in non_empty_ids]
            embeddings_with_metadatas = (
                [embeddings[idx] for idx in non_empty_ids] if embeddings else None
                [embeddings[idx] for idx in non_empty_ids]
                if embeddings is not None and len(embeddings) > 0
                else None
            )
            ids_with_metadata = [ids[idx] for idx in non_empty_ids]
            try:
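The replacement guard in the hunk above is stricter than the bare truthiness test it replaces. A minimal sketch of the behaviour it avoids, assuming `embeddings` may arrive either as a plain list or as a NumPy array (the array case is an assumption for illustration, not something stated in this diff):

import numpy as np

def filtered(embeddings, non_empty_ids):
    # `if embeddings:` would raise "ValueError: The truth value of an array
    # with more than one element is ambiguous" for a multi-row NumPy array,
    # whereas the explicit None/length check handles lists and arrays alike
    # and treats an empty sequence the same as None.
    if embeddings is not None and len(embeddings) > 0:
        return [embeddings[idx] for idx in non_empty_ids]
    return None

print(filtered([[0.1, 0.2], [0.3, 0.4]], [0, 1]))          # list input
print(filtered(np.array([[0.1, 0.2], [0.3, 0.4]]), [0]))   # array input
print(filtered(None, []))                                  # no embeddings -> None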
@ -4,7 +4,7 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.2,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43",
|
||||
@ -13,7 +13,7 @@ dependencies = [
|
||||
"chromadb!=0.5.10,!=0.5.11,!=0.5.12,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,<0.7.0,>=0.4.0",
|
||||
]
|
||||
name = "langchain-chroma"
|
||||
version = "0.2.1"
|
||||
version = "0.2.2"
|
||||
description = "An integration package connecting Chroma and LangChain"
|
||||
readme = "README.md"
|
||||
|
||||
@ -33,31 +33,28 @@ test = [
|
||||
"onnxruntime<1.20; python_version < \"3.10\"",
|
||||
"onnxruntime; python_version >= \"3.10\"",
|
||||
"pytest-socket<1.0.0,>=0.7.0",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core ; python_version >= \"3.9\"",
|
||||
"langchain-core<0.3,>=0.1.40; python_version < \"3.9\"",
|
||||
"langchain-tests @ file:///${PROJECT_ROOT}/../../standard-tests",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
"langchain-core",
|
||||
"langchain-tests",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
test_integration = []
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
"onnxruntime<1.20; python_version < \"3.10\"",
|
||||
"onnxruntime; python_version >= \"3.10\"",
|
||||
]
|
||||
dev = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core ; python_version >= \"3.9\"",
|
||||
"langchain-core<0.3,>=0.1.40; python_version < \"3.9\"",
|
||||
]
|
||||
dev = ["langchain-core"]
|
||||
typing = [
|
||||
"mypy<2.0,>=1.10",
|
||||
"types-requests<3.0.0.0,>=2.31.0.20240406",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core ; python_version >= \"3.9\"",
|
||||
"langchain-core<0.3,>=0.1.40; python_version < \"3.9\"",
|
||||
"langchain-core",
|
||||
]
|
||||
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
langchain-tests = { path = "../../standard-tests", editable = true }
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = true
|
||||
|
||||
|
@ -732,7 +732,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-chroma"
|
||||
version = "0.2.1"
|
||||
version = "0.2.2"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "chromadb" },
|
||||
@ -774,17 +774,14 @@ typing = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "chromadb", specifier = ">=0.4.0,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,!=0.5.10,!=0.5.11,!=0.5.12,<0.7.0" },
|
||||
{ name = "langchain-core", specifier = ">=0.2.43,!=0.3.0,!=0.3.1,!=0.3.2,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,<0.4.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.22.4,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<2.0.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "langchain-core", marker = "python_full_version < '3.9'", specifier = ">=0.1.40,<0.3" },
|
||||
{ name = "langchain-core", marker = "python_full_version >= '3.9'", directory = "../../core" },
|
||||
]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [
|
||||
{ name = "onnxruntime", marker = "python_full_version < '3.10'", specifier = "<1.20" },
|
||||
{ name = "onnxruntime", marker = "python_full_version >= '3.10'" },
|
||||
@ -792,9 +789,8 @@ lint = [
|
||||
]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", marker = "python_full_version < '3.9'", specifier = ">=0.1.40,<0.3" },
|
||||
{ name = "langchain-core", marker = "python_full_version >= '3.9'", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "onnxruntime", marker = "python_full_version < '3.10'", specifier = "<1.20" },
|
||||
{ name = "onnxruntime", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
@ -806,16 +802,15 @@ test = [
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", marker = "python_full_version < '3.9'", specifier = ">=0.1.40,<0.3" },
|
||||
{ name = "langchain-core", marker = "python_full_version >= '3.9'", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-requests", specifier = ">=2.31.0.20240406,<3.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -872,8 +867,8 @@ typing = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.10"
|
||||
source = { directory = "../../standard-tests" }
|
||||
version = "0.3.11"
|
||||
source = { editable = "../../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
@ -887,7 +882,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<0.4.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -896,6 +891,16 @@ requires-dist = [
|
||||
{ name = "syrupy", specifier = ">=4,<5" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", editable = "../../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.3.5"
|
||||
|
@ -4,7 +4,7 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core<1.0.0,>=0.3.34",
|
||||
@ -27,22 +27,20 @@ test = [
|
||||
"pytest-socket<1.0.0,>=0.7.0",
|
||||
"pytest-watcher<1.0.0,>=0.3.4",
|
||||
"langchain-tests<1.0.0,>=0.3.5",
|
||||
"langchain-openai @ file:///${PROJECT_ROOT}/../openai",
|
||||
"langchain-openai",
|
||||
"pytest-timeout<3.0.0,>=2.3.1",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.6",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.6"]
|
||||
test_integration = []
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
]
|
||||
lint = ["ruff<1.0,>=0.5"]
|
||||
dev = []
|
||||
typing = [
|
||||
"mypy<2.0,>=1.10",
|
||||
]
|
||||
typing = ["mypy<2.0,>=1.10"]
|
||||
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-openai = { path = "../openai", editable = true }
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = "True"
|
||||
|
||||
|
@ -367,8 +367,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -378,9 +378,49 @@ dependencies = [
|
||||
{ name = "tenacity" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b9/c8/a4394a5bdfc820f539bd6983b1408964723ed43ce8cfafbcc7cada69c015/langchain_core-0.3.34.tar.gz", hash = "sha256:26504cf1e8e6c310adad907b890d4e3c147581cfa7434114f6dc1134fe4bc6d3", size = 524756 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/65/27a586c8871a0632d747059eb97855b49ac6dea12b263a79f6c1b4f18b99/langchain_core-0.3.34-py3-none-any.whl", hash = "sha256:a057ebeddd2158d3be14bde341b25640ddf958b6989bd6e47160396f5a8202ae", size = 412955 },
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "jsonpatch", specifier = ">=1.33,<2.0" },
|
||||
{ name = "langsmith", specifier = ">=0.1.125,<0.4" },
|
||||
{ name = "packaging", specifier = ">=23.2,<25" },
|
||||
{ name = "pydantic", marker = "python_full_version < '3.12.4'", specifier = ">=2.5.2,<3.0.0" },
|
||||
{ name = "pydantic", marker = "python_full_version >= '3.12.4'", specifier = ">=2.7.4,<3.0.0" },
|
||||
{ name = "pyyaml", specifier = ">=5.3" },
|
||||
{ name = "tenacity", specifier = ">=8.1.0,!=8.4.0,<10.0.0" },
|
||||
{ name = "typing-extensions", specifier = ">=4.7" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "grandalf", specifier = ">=0.8,<1.0" },
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [
|
||||
{ name = "blockbuster", specifier = "~=1.5.11" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "grandalf", specifier = ">=0.8,<1.0" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<3" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
{ name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
|
||||
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
|
||||
{ name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" },
|
||||
{ name = "responses", specifier = ">=0.25.0,<1.0.0" },
|
||||
{ name = "syrupy", specifier = ">=4.0.2,<5.0.0" },
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-text-splitters", directory = "../../text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.10,<1.11" },
|
||||
{ name = "types-jinja2", specifier = ">=2.11.9,<3.0.0" },
|
||||
{ name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
|
||||
{ name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -414,8 +454,8 @@ typing = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-openai", specifier = ">=0.3.5,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-openai", editable = "../openai" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
@ -423,7 +463,7 @@ codespell = [{ name = "codespell", specifier = ">=2.2.6,<3.0.0" }]
|
||||
dev = []
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "langchain-openai", directory = "../openai" },
|
||||
{ name = "langchain-openai", editable = "../openai" },
|
||||
{ name = "langchain-tests", specifier = ">=0.3.5,<1.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.4.3,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.23.2,<1.0.0" },
|
||||
@ -437,7 +477,7 @@ typing = [{ name = "mypy", specifier = ">=1.10,<2.0" }]
|
||||
[[package]]
|
||||
name = "langchain-openai"
|
||||
version = "0.3.5"
|
||||
source = { directory = "../openai" }
|
||||
source = { editable = "../openai" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
{ name = "openai" },
|
||||
@ -446,19 +486,19 @@ dependencies = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "openai", specifier = ">=1.58.1,<2.0.0" },
|
||||
{ name = "tiktoken", specifier = ">=0.7,<1" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<2.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
@ -477,7 +517,7 @@ test-integration = [
|
||||
{ name = "pillow", specifier = ">=10.3.0,<11.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-tqdm", specifier = ">=4.66.0.5,<5.0.0.0" },
|
||||
]
|
||||
|
@ -4,12 +4,9 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core<1.0.0,>=0.3.15",
|
||||
"exa-py<2.0.0,>=1.0.8",
|
||||
]
|
||||
dependencies = ["langchain-core<1.0.0,>=0.3.15", "exa-py<2.0.0,>=1.0.8"]
|
||||
name = "langchain-exa"
|
||||
version = "0.2.1"
|
||||
description = "An integration package connecting Exa and LangChain"
|
||||
@ -28,35 +25,36 @@ test = [
|
||||
"syrupy<5.0.0,>=4.0.2",
|
||||
"pytest-watcher<1.0.0,>=0.3.4",
|
||||
"pytest-asyncio<1.0.0,>=0.21.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
]
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
]
|
||||
dev = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain-core",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
lint = ["ruff<1.0,>=0.5"]
|
||||
dev = ["langchain-core"]
|
||||
test_integration = []
|
||||
typing = [
|
||||
"mypy<2.0,>=1.10",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"pydantic>2.0,<2.10", # TODO: support 2.10
|
||||
"langchain-core",
|
||||
"pydantic>2.0,<2.10", # TODO: support 2.10
|
||||
]
|
||||
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = "True"
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = [ "E", "F", "I", "T201",]
|
||||
select = ["E", "F", "I", "T201"]
|
||||
|
||||
[tool.coverage.run]
|
||||
omit = [ "tests/*",]
|
||||
omit = ["tests/*"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5"
|
||||
markers = [ "requires: mark tests as requiring a specific library", "compile: mark placeholder test used to compile integration tests without running them",]
|
||||
markers = [
|
||||
"requires: mark tests as requiring a specific library",
|
||||
"compile: mark placeholder test used to compile integration tests without running them",
|
||||
]
|
||||
asyncio_mode = "auto"
|
||||
|
@ -393,8 +393,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -486,16 +486,16 @@ typing = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "exa-py", specifier = ">=1.0.8,<2.0.0" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.15,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
@ -504,7 +504,7 @@ test = [
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "pydantic", specifier = ">2.0,<2.10" },
|
||||
]
|
||||
|
@ -4,7 +4,7 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core<1.0.0,>=0.3.33",
|
||||
@ -32,25 +32,18 @@ test = [
|
||||
"pytest-watcher<1.0.0,>=0.3.4",
|
||||
"pytest-asyncio<1.0.0,>=0.21.1",
|
||||
"pytest-socket<1.0.0,>=0.7.0",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain-tests @ file:///${PROJECT_ROOT}/../../standard-tests",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
"langchain-core",
|
||||
"langchain-tests",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
test_integration = []
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
]
|
||||
dev = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
typing = [
|
||||
"mypy<2.0,>=1.10",
|
||||
"types-requests<3,>=2",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
lint = ["ruff<1.0,>=0.5"]
|
||||
dev = ["langchain-core"]
|
||||
typing = ["mypy<2.0,>=1.10", "types-requests<3,>=2", "langchain-core"]
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
langchain-tests = { path = "../../standard-tests", editable = true }
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = "True"
|
||||
|
@ -635,8 +635,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -734,19 +734,19 @@ typing = [
|
||||
requires-dist = [
|
||||
{ name = "aiohttp", specifier = ">=3.9.1,<4.0.0" },
|
||||
{ name = "fireworks-ai", specifier = ">=0.13.0" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "openai", specifier = ">=1.10.0,<2.0.0" },
|
||||
{ name = "requests", specifier = ">=2,<3" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
@ -756,15 +756,15 @@ test = [
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-requests", specifier = ">=2,<3" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.10"
|
||||
source = { directory = "../../standard-tests" }
|
||||
version = "0.3.11"
|
||||
source = { editable = "../../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
@ -779,7 +779,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<0.4.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -788,6 +788,16 @@ requires-dist = [
|
||||
{ name = "syrupy", specifier = ">=4,<5" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", editable = "../../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.3.5"
|
||||
|
@ -4,12 +4,9 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core<1.0.0,>=0.3.33",
|
||||
"groq<1,>=0.4.1",
|
||||
]
|
||||
dependencies = ["langchain-core<1.0.0,>=0.3.33", "groq<1,>=0.4.1"]
|
||||
name = "langchain-groq"
|
||||
version = "0.2.4"
|
||||
description = "An integration package connecting Groq and LangChain"
|
||||
@ -26,26 +23,18 @@ test = [
|
||||
"pytest-mock<4.0.0,>=3.10.0",
|
||||
"pytest-watcher<1.0.0,>=0.3.4",
|
||||
"pytest-asyncio<1.0.0,>=0.21.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain-tests @ file:///${PROJECT_ROOT}/../../standard-tests",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
]
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
]
|
||||
dev = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
test_integration = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
typing = [
|
||||
"mypy<2.0,>=1.10",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain-core",
|
||||
"langchain-tests",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
lint = ["ruff<1.0,>=0.5"]
|
||||
dev = ["langchain-core"]
|
||||
test_integration = ["langchain-core"]
|
||||
typing = ["mypy<2.0,>=1.10", "langchain-core"]
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
langchain-tests = { path = "../../standard-tests", editable = true }
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = "True"
|
||||
|
@ -13,7 +13,6 @@ from langchain_core.messages import (
    SystemMessage,
)
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_core.tools import tool
from pydantic import BaseModel, Field

from langchain_groq import ChatGroq
@ -394,42 +393,6 @@ def test_json_mode_structured_output() -> None:
    assert len(result.punchline) != 0


def test_tool_calling_no_arguments() -> None:
    # Note: this is a variant of a test in langchain_tests
    # that as of 2024-08-19 fails with "Failed to call a function. Please
    # adjust your prompt." when `tool_choice="any"` is specified, but
    # passes when `tool_choice` is not specified.
    model = ChatGroq(model="llama-3.3-70b-versatile", temperature=0)  # type: ignore[call-arg]

    @tool
    def magic_function_no_args() -> int:
        """Calculates a magic function."""
        return 5

    model_with_tools = model.bind_tools([magic_function_no_args])
    query = "What is the value of magic_function()? Use the tool."
    result = model_with_tools.invoke(query)
    assert isinstance(result, AIMessage)
    assert len(result.tool_calls) == 1
    tool_call = result.tool_calls[0]
    assert tool_call["name"] == "magic_function_no_args"
    assert tool_call["args"] == {}
    assert tool_call["id"] is not None
    assert tool_call["type"] == "tool_call"

    # Test streaming
    full: Optional[BaseMessageChunk] = None
    for chunk in model_with_tools.stream(query):
        full = chunk if full is None else full + chunk  # type: ignore
    assert isinstance(full, AIMessage)
    assert len(full.tool_calls) == 1
    tool_call = full.tool_calls[0]
    assert tool_call["name"] == "magic_function_no_args"
    assert tool_call["args"] == {}
    assert tool_call["id"] is not None
    assert tool_call["type"] == "tool_call"


# Groq does not currently support N > 1
# @pytest.mark.scheduled
# def test_chat_multiple_completions() -> None:
@ -48,26 +48,3 @@ class TestGroqLlama(BaseTestGroq):
    @property
    def supports_json_mode(self) -> bool:
        return False  # Not supported in streaming mode

    @pytest.mark.xfail(
        reason=("Fails with 'Failed to call a function. Please adjust your prompt.'")
    )
    def test_tool_calling_with_no_arguments(self, model: BaseChatModel) -> None:
        super().test_tool_calling_with_no_arguments(model)

    @pytest.mark.xfail(
        reason=("Fails with 'Failed to call a function. Please adjust your prompt.'")
    )
    def test_tool_message_histories_string_content(
        self, model: BaseChatModel, my_adder_tool: BaseTool
    ) -> None:
        super().test_tool_message_histories_string_content(model, my_adder_tool)

    @pytest.mark.xfail(
        reason=(
            "Sometimes fails with 'Failed to call a function. "
            "Please adjust your prompt.'"
        )
    )
    def test_bind_runnables_as_tools(self, model: BaseChatModel) -> None:
        super().test_bind_runnables_as_tools(model)
@ -313,8 +313,8 @@ wheels = [

[[package]]
name = "langchain-core"
version = "0.3.34rc1"
source = { directory = "../../core" }
version = "0.3.35"
source = { editable = "../../core" }
dependencies = [
    { name = "jsonpatch" },
    { name = "langsmith" },
@ -407,31 +407,31 @@ typing = [
[package.metadata]
requires-dist = [
    { name = "groq", specifier = ">=0.4.1,<1" },
    { name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
    { name = "langchain-core", editable = "../../core" },
]

[package.metadata.requires-dev]
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
dev = [{ name = "langchain-core", directory = "../../core" }]
dev = [{ name = "langchain-core", editable = "../../core" }]
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
test = [
    { name = "langchain-core", directory = "../../core" },
    { name = "langchain-tests", directory = "../../standard-tests" },
    { name = "langchain-core", editable = "../../core" },
    { name = "langchain-tests", editable = "../../standard-tests" },
    { name = "pytest", specifier = ">=7.3.0,<8.0.0" },
    { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
    { name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
    { name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
]
test-integration = [{ name = "langchain-core", directory = "../../core" }]
test-integration = [{ name = "langchain-core", editable = "../../core" }]
typing = [
    { name = "langchain-core", directory = "../../core" },
    { name = "langchain-core", editable = "../../core" },
    { name = "mypy", specifier = ">=1.10,<2.0" },
]

[[package]]
name = "langchain-tests"
version = "0.3.10"
source = { directory = "../../standard-tests" }
version = "0.3.11"
source = { editable = "../../standard-tests" }
dependencies = [
    { name = "httpx" },
    { name = "langchain-core" },
@ -446,7 +446,7 @@ dependencies = [
[package.metadata]
requires-dist = [
    { name = "httpx", specifier = ">=0.25.0,<1" },
    { name = "langchain-core", specifier = ">=0.3.33,<0.4.0" },
    { name = "langchain-core", editable = "../../core" },
    { name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
    { name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
    { name = "pytest", specifier = ">=7,<9" },
@ -455,6 +455,16 @@ requires-dist = [
    { name = "syrupy", specifier = ">=4,<5" },
]

[package.metadata.requires-dev]
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
test = [{ name = "langchain-core", editable = "../../core" }]
test-integration = []
typing = [
    { name = "langchain-core", editable = "../../core" },
    { name = "mypy", specifier = ">=1,<2" },
]

[[package]]
name = "langsmith"
version = "0.3.5"

@ -4,7 +4,7 @@ build-backend = "pdm.backend"

[project]
authors = []
license = {text = "MIT"}
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
    "langchain-core<1.0.0,>=0.3.15",
@ -33,26 +33,20 @@ test = [
    "scipy<2.0.0,>=1.7.0; python_version >= \"3.12\"",
    "numpy<2,>=1; python_version < \"3.12\"",
    "numpy<2.0.0,>=1.26.0; python_version >= \"3.12\"",
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
    "langchain-tests @ file:///${PROJECT_ROOT}/../../standard-tests",
    "langchain-community @ file:///${PROJECT_ROOT}/../../community",
]
codespell = [
    "codespell<3.0.0,>=2.2.0",
]
lint = [
    "ruff<1.0,>=0.5",
]
dev = [
    "ipykernel<7.0.0,>=6.29.2",
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
    "langchain-core",
    "langchain-tests",
    "langchain-community",
]
codespell = ["codespell<3.0.0,>=2.2.0"]
lint = ["ruff<1.0,>=0.5"]
dev = ["ipykernel<7.0.0,>=6.29.2", "langchain-core"]
test_integration = []
typing = [
    "mypy<2.0,>=1.10",
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
]
typing = ["mypy<2.0,>=1.10", "langchain-core"]

[tool.uv.sources]
langchain-core = { path = "../../core", editable = true }
langchain-tests = { path = "../../standard-tests", editable = true }
langchain-community = { path = "../../community", editable = true }

[tool.mypy]
disallow_untyped_defs = "True"
@ -853,8 +853,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.3.18rc1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
version = "0.3.18"
|
||||
source = { editable = "../../langchain" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11'" },
|
||||
@ -868,15 +868,104 @@ dependencies = [
|
||||
{ name = "sqlalchemy" },
|
||||
{ name = "tenacity" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5d/87/79e4c09d85f81805420ab00cac7719a6c80f3bb822cb095592e6c3bbf856/langchain-0.3.18rc1.tar.gz", hash = "sha256:486e858d3588af703427f4a642520c6159a9a0a98379571372f24415f91b1933", size = 10223626 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/92/76/0211285da97ed5f4823d82baf063596da9f13848dc693fdd3bf1099931ef/langchain-0.3.18rc1-py3-none-any.whl", hash = "sha256:373dd95fda919fdd3fffdb0095c2ae3c5a3f89f8d0a7202864d44d5ebecfb660", size = 1010357 },
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "aiohttp", specifier = ">=3.8.3,<4.0.0" },
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11'", specifier = ">=4.0.0,<5.0.0" },
|
||||
{ name = "langchain-anthropic", marker = "extra == 'anthropic'" },
|
||||
{ name = "langchain-aws", marker = "extra == 'aws'" },
|
||||
{ name = "langchain-cohere", marker = "extra == 'cohere'" },
|
||||
{ name = "langchain-community", marker = "extra == 'community'" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-deepseek", marker = "extra == 'deepseek'" },
|
||||
{ name = "langchain-fireworks", marker = "extra == 'fireworks'" },
|
||||
{ name = "langchain-google-genai", marker = "extra == 'google-genai'" },
|
||||
{ name = "langchain-google-vertexai", marker = "extra == 'google-vertexai'" },
|
||||
{ name = "langchain-groq", marker = "extra == 'groq'" },
|
||||
{ name = "langchain-huggingface", marker = "extra == 'huggingface'" },
|
||||
{ name = "langchain-mistralai", marker = "extra == 'mistralai'" },
|
||||
{ name = "langchain-ollama", marker = "extra == 'ollama'" },
|
||||
{ name = "langchain-openai", marker = "extra == 'openai'", editable = "../openai" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "langchain-together", marker = "extra == 'together'" },
|
||||
{ name = "langsmith", specifier = ">=0.1.17,<0.4" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.26.4,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
|
||||
{ name = "pyyaml", specifier = ">=5.3" },
|
||||
{ name = "requests", specifier = ">=2,<3" },
|
||||
{ name = "sqlalchemy", specifier = ">=1.4,<3" },
|
||||
{ name = "tenacity", specifier = ">=8.1.0,!=8.4.0,<10" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "playwright", specifier = ">=1.28.0,<2.0.0" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
lint = [
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "blockbuster", specifier = ">=1.5.14,<1.6" },
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "duckdb-engine", specifier = ">=0.9.2,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-openai", editable = "../openai" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "lark", specifier = ">=1.1.5,<2.0.0" },
|
||||
{ name = "packaging", specifier = ">=24.2" },
|
||||
{ name = "pandas", specifier = ">=2.0.0,<3.0.0" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.23.2,<1.0.0" },
|
||||
{ name = "pytest-cov", specifier = ">=4.0.0,<5.0.0" },
|
||||
{ name = "pytest-dotenv", specifier = ">=0.5.2,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
{ name = "pytest-socket", specifier = ">=0.6.0,<1.0.0" },
|
||||
{ name = "pytest-watcher", specifier = ">=0.2.6,<1.0.0" },
|
||||
{ name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" },
|
||||
{ name = "requests-mock", specifier = ">=1.11.0,<2.0.0" },
|
||||
{ name = "responses", specifier = ">=0.22.0,<1.0.0" },
|
||||
{ name = "syrupy", specifier = ">=4.0.2,<5.0.0" },
|
||||
{ name = "toml", specifier = ">=0.10.2" },
|
||||
]
|
||||
test-integration = [
|
||||
{ name = "cassio", specifier = ">=0.1.0,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "langchainhub", specifier = ">=0.1.16,<1.0.0" },
|
||||
{ name = "pytest-vcr", specifier = ">=1.0.2,<2.0.0" },
|
||||
{ name = "python-dotenv", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "urllib3", marker = "python_full_version < '3.10'", specifier = "<2" },
|
||||
{ name = "wrapt", specifier = ">=1.15.0,<2.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "mypy-protobuf", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "types-chardet", specifier = ">=5.0.4.6,<6.0.0.0" },
|
||||
{ name = "types-pytz", specifier = ">=2023.3.0.0,<2024.0.0.0" },
|
||||
{ name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
|
||||
{ name = "types-redis", specifier = ">=4.3.21.6,<5.0.0.0" },
|
||||
{ name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
|
||||
{ name = "types-toml", specifier = ">=0.10.8.1,<1.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-community"
|
||||
version = "0.3.17rc1"
|
||||
source = { directory = "../../community" }
|
||||
version = "0.3.17"
|
||||
source = { editable = "../../community" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "dataclasses-json" },
|
||||
@ -897,8 +986,8 @@ requires-dist = [
|
||||
{ name = "aiohttp", specifier = ">=3.8.3,<4.0.0" },
|
||||
{ name = "dataclasses-json", specifier = ">=0.5.7,<0.7" },
|
||||
{ name = "httpx-sse", specifier = ">=0.4.0,<1.0.0" },
|
||||
{ name = "langchain", specifier = ">=0.3.18rc1,<1.0.0" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34rc1,<1.0.0" },
|
||||
{ name = "langchain", editable = "../../langchain" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langsmith", specifier = ">=0.1.125,<0.4" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.26.4,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
@ -913,22 +1002,23 @@ requires-dist = [
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
lint = [
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "ruff", specifier = ">=0.5,<0.6" },
|
||||
{ name = "ruff", specifier = ">=0.9,<0.10" },
|
||||
]
|
||||
test = [
|
||||
{ name = "blockbuster", specifier = ">=1.5.13,<1.6" },
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "duckdb-engine", specifier = ">=0.13.6,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain", directory = "../langchain" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-tests", directory = "../standard-tests" },
|
||||
{ name = "langchain", editable = "../../langchain" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "lark", specifier = ">=1.1.5,<2.0.0" },
|
||||
{ name = "pandas", specifier = ">=2.0.0,<3.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.4.4,<8.0.0" },
|
||||
@ -949,9 +1039,9 @@ test-integration = [
|
||||
{ name = "vcrpy", specifier = ">=6,<7" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain", directory = "../langchain" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain", editable = "../../langchain" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.12,<2.0" },
|
||||
{ name = "mypy-protobuf", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "types-chardet", specifier = ">=5.0.4.6,<6.0.0.0" },
|
||||
@ -964,8 +1054,8 @@ typing = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -1063,7 +1153,7 @@ typing = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "huggingface-hub", specifier = ">=0.23.0" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.15,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "sentence-transformers", specifier = ">=2.6.0" },
|
||||
{ name = "tokenizers", specifier = ">=0.19.1" },
|
||||
{ name = "transformers", specifier = ">=4.39.0" },
|
||||
@ -1073,13 +1163,13 @@ requires-dist = [
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "ipykernel", specifier = ">=6.29.2,<7.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "langchain-community", directory = "../../community" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-community", editable = "../../community" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<2.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
@ -1091,14 +1181,14 @@ test = [
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.10"
|
||||
source = { directory = "../../standard-tests" }
|
||||
version = "0.3.11"
|
||||
source = { editable = "../../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
@ -1112,7 +1202,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<0.4.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -1121,16 +1211,58 @@ requires-dist = [
|
||||
{ name = "syrupy", specifier = ">=4,<5" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", editable = "../../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-text-splitters"
|
||||
version = "0.3.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
version = "0.3.6"
|
||||
source = { editable = "../../text-splitters" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/10/35/a6f8d6b1bb0e6e8c00b49bce4d1a115f8b68368b1899f65bb34dbbb44160/langchain_text_splitters-0.3.5.tar.gz", hash = "sha256:11cb7ca3694e5bdd342bc16d3875b7f7381651d4a53cbb91d34f22412ae16443", size = 26318 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/83/f8081c3bea416bd9d9f0c26af795c74f42c24f9ad3c4fbf361b7d69de134/langchain_text_splitters-0.3.5-py3-none-any.whl", hash = "sha256:8c9b059827438c5fa8f327b4df857e307828a5ec815163c9b5c9569a3e82c8ee", size = 31620 },
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "langchain-core", editable = "../../core" }]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
]
|
||||
lint = [
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
{ name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
|
||||
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
|
||||
{ name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" },
|
||||
]
|
||||
test-integration = [
|
||||
{ name = "nltk", specifier = ">=3.9.1,<4.0.0" },
|
||||
{ name = "sentence-transformers", marker = "python_full_version < '3.13'", specifier = ">=2.6.0" },
|
||||
{ name = "spacy", marker = "python_full_version < '3.10'", specifier = ">=3.0.0,<3.8.4" },
|
||||
{ name = "spacy", marker = "python_full_version < '3.13'", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "transformers", specifier = ">=4.47.0,<5.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "lxml-stubs", specifier = ">=0.5.1,<1.0.0" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
|
||||
{ name = "types-requests", specifier = ">=2.31.0.20240218,<3.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4,7 +4,7 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core<1.0.0,>=0.3.33",
|
||||
@ -28,35 +28,32 @@ test = [
|
||||
"pytest<8.0.0,>=7.3.0",
|
||||
"pytest-asyncio<1.0.0,>=0.21.1",
|
||||
"pytest-watcher<1.0.0,>=0.3.4",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain-tests @ file:///${PROJECT_ROOT}/../../standard-tests",
|
||||
"langchain-core",
|
||||
"langchain-tests",
|
||||
]
|
||||
test_integration = []
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
]
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
]
|
||||
dev = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
typing = [
|
||||
"mypy<2.0,>=1.10",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
lint = ["ruff<1.0,>=0.5"]
|
||||
dev = ["langchain-core"]
|
||||
typing = ["mypy<2.0,>=1.10", "langchain-core"]
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
langchain-tests = { path = "../../standard-tests", editable = true }
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = "True"
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = [ "E", "F", "I", "T201",]
|
||||
select = ["E", "F", "I", "T201"]
|
||||
|
||||
[tool.coverage.run]
|
||||
omit = [ "tests/*",]
|
||||
omit = ["tests/*"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "--strict-markers --strict-config --durations=5"
|
||||
markers = [ "requires: mark tests as requiring a specific library", "compile: mark placeholder test used to compile integration tests without running them",]
|
||||
markers = [
|
||||
"requires: mark tests as requiring a specific library",
|
||||
"compile: mark placeholder test used to compile integration tests without running them",
|
||||
]
|
||||
asyncio_mode = "auto"
|
||||
|
@ -332,8 +332,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -426,32 +426,32 @@ typing = [
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.2,<1" },
|
||||
{ name = "httpx-sse", specifier = ">=0.3.1,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "pydantic", specifier = ">=2,<3" },
|
||||
{ name = "tokenizers", specifier = ">=0.15.1,<1" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.10"
|
||||
source = { directory = "../../standard-tests" }
|
||||
version = "0.3.11"
|
||||
source = { editable = "../../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
@ -466,7 +466,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<0.4.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -475,6 +475,16 @@ requires-dist = [
|
||||
{ name = "syrupy", specifier = ">=4,<5" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", editable = "../../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.3.5"
|
||||
|
@ -4,7 +4,7 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.2,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43",
|
||||
@ -29,32 +29,25 @@ test = [
|
||||
"syrupy<5.0.0,>=4.0.2",
|
||||
"pytest-watcher<1.0.0,>=0.3.4",
|
||||
"pytest-asyncio<1.0.0,>=0.21.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain-core",
|
||||
"numpy<2.0.0,>=1.24.0; python_version < \"3.12\"",
|
||||
"numpy<2.0.0,>=1.26.0; python_version >= \"3.12\"",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
test_integration = []
|
||||
lint = [
|
||||
"ruff<1.0.0,>=0.1.5",
|
||||
]
|
||||
typing = [
|
||||
"mypy<1.0,>=0.991",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
dev = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
lint = ["ruff<1.0.0,>=0.1.5"]
|
||||
typing = ["mypy<1.0,>=0.991", "langchain-core"]
|
||||
dev = ["langchain-core"]
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = [
|
||||
"E", # pycodestyle
|
||||
"F", # pyflakes
|
||||
"I", # isort
|
||||
"T201", # print
|
||||
"E", # pycodestyle
|
||||
"F", # pyflakes
|
||||
"I", # isort
|
||||
"T201", # print
|
||||
]
|
||||
|
||||
[tool.mypy]
|
||||
@ -77,7 +70,7 @@ addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5
|
||||
# Registering custom markers.
|
||||
# https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers
|
||||
markers = [
|
||||
"requires: mark tests as requiring a specific library",
|
||||
"compile: mark placeholder test used to compile integration tests without running them",
|
||||
"requires: mark tests as requiring a specific library",
|
||||
"compile: mark placeholder test used to compile integration tests without running them",
|
||||
]
|
||||
asyncio_mode = "auto"
|
||||
|
@ -333,8 +333,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -426,18 +426,18 @@ typing = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "langchain-core", specifier = ">=0.2.43,!=0.3.0,!=0.3.1,!=0.3.2,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,<0.4.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "nomic", specifier = ">=3.1.2,<4.0.0" },
|
||||
{ name = "pillow", specifier = ">=10.3.0,<11.0.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.1.5,<1.0.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<2.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
@ -448,7 +448,7 @@ test = [
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=0.991,<1.0" },
|
||||
]
|
||||
|
||||
|
@ -4,12 +4,9 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"ollama<1,>=0.4.4",
|
||||
"langchain-core<1.0.0,>=0.3.33",
|
||||
]
|
||||
dependencies = ["ollama<1,>=0.4.4", "langchain-core<1.0.0,>=0.3.33"]
|
||||
name = "langchain-ollama"
|
||||
version = "0.2.3"
|
||||
description = "An integration package connecting Ollama and LangChain"
|
||||
@ -27,35 +24,29 @@ test = [
|
||||
"syrupy<5.0.0,>=4.0.2",
|
||||
"pytest-socket<1.0.0,>=0.7.0",
|
||||
"pytest-watcher<1.0.0,>=0.3.4",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain-tests @ file:///${PROJECT_ROOT}/../../standard-tests",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.6",
|
||||
"langchain-core",
|
||||
"langchain-tests",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.6"]
|
||||
test_integration = []
|
||||
lint = [
|
||||
"ruff<1.0.0,>=0.1.8",
|
||||
]
|
||||
dev = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
typing = [
|
||||
"mypy<2.0.0,>=1.7.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
lint = ["ruff<1.0.0,>=0.1.8"]
|
||||
dev = ["langchain-core"]
|
||||
typing = ["mypy<2.0.0,>=1.7.1", "langchain-core"]
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
langchain-tests = { path = "../../standard-tests", editable = true }
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = "True"
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = [
|
||||
"E", # pycodestyle
|
||||
"F", # pyflakes
|
||||
"I", # isort
|
||||
"T201", # print
|
||||
"D", # pydocstyle
|
||||
"E", # pycodestyle
|
||||
"F", # pyflakes
|
||||
"I", # isort
|
||||
"T201", # print
|
||||
"D", # pydocstyle
|
||||
|
||||
]
|
||||
|
||||
@ -71,6 +62,6 @@ omit = ["tests/*"]
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5"
|
||||
markers = [
|
||||
"compile: mark placeholder test used to compile integration tests without running them",
|
||||
"compile: mark placeholder test used to compile integration tests without running them",
|
||||
]
|
||||
asyncio_mode = "auto"
|
||||
|
@ -287,8 +287,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -378,17 +378,17 @@ typing = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "ollama", specifier = ">=0.4.4,<1" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.6,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.1.8,<1.0.0" }]
|
||||
test = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "pytest", specifier = ">=7.4.3,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.23.2,<1.0.0" },
|
||||
{ name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
|
||||
@ -397,14 +397,14 @@ test = [
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.7.1,<2.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.10"
|
||||
source = { directory = "../../standard-tests" }
|
||||
version = "0.3.11"
|
||||
source = { editable = "../../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
@ -419,7 +419,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<0.4.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -428,6 +428,16 @@ requires-dist = [
|
||||
{ name = "syrupy", specifier = ">=4,<5" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", editable = "../../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.3.5"
|
||||
|
@ -573,7 +573,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
        chunk_size_ = chunk_size or self.chunk_size
        if not self.check_embedding_ctx_length:
            embeddings: List[List[float]] = []
            for i in range(0, len(texts), self.chunk_size):
            for i in range(0, len(texts), chunk_size_):
                response = self.client.create(
                    input=texts[i : i + chunk_size_], **self._invocation_params
                )
@ -4,7 +4,7 @@ build-backend = "pdm.backend"

[project]
authors = []
license = {text = "MIT"}
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
    "langchain-core<1.0.0,>=0.3.34",
@ -34,30 +34,23 @@ test = [
    "pytest-xdist<4.0.0,>=3.6.1",
    "numpy<2,>=1; python_version < \"3.12\"",
    "numpy<2.0.0,>=1.26.0; python_version >= \"3.12\"",
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
    "langchain-tests @ file:///${PROJECT_ROOT}/../../standard-tests",
]
codespell = [
    "codespell<3.0.0,>=2.2.0",
]
lint = [
    "ruff<1.0,>=0.5",
]
dev = [
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
    "langchain-core",
    "langchain-tests",
]
codespell = ["codespell<3.0.0,>=2.2.0"]
lint = ["ruff<1.0,>=0.5"]
dev = ["langchain-core"]
test_integration = [
    "httpx<1.0.0,>=0.27.0",
    "pillow<11.0.0,>=10.3.0",
    "numpy<2,>=1; python_version < \"3.12\"",
    "numpy<2.0.0,>=1.26.0; python_version >= \"3.12\"",
]
typing = [
    "mypy<2.0,>=1.10",
    "types-tqdm<5.0.0.0,>=4.66.0.5",
    "langchain-core @ file:///${PROJECT_ROOT}/../../core",
]
typing = ["mypy<2.0,>=1.10", "types-tqdm<5.0.0.0,>=4.66.0.5", "langchain-core"]

[tool.uv.sources]
langchain-core = { path = "../../core", editable = true }
langchain-tests = { path = "../../standard-tests", editable = true }

[tool.mypy]
disallow_untyped_defs = "True"
@ -66,17 +59,23 @@ module = "transformers"
ignore_missing_imports = true

[tool.ruff.lint]
select = [ "E", "F", "I", "T201",]
select = ["E", "F", "I", "T201"]

[tool.ruff.format]
docstring-code-format = true
skip-magic-trailing-comma = true

[tool.coverage.run]
omit = [ "tests/*",]
omit = ["tests/*"]

[tool.pytest.ini_options]
addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5 --cov=langchain_openai"
markers = [ "requires: mark tests as requiring a specific library", "compile: mark placeholder test used to compile integration tests without running them", "scheduled: mark tests to run in scheduled testing",]
markers = [
    "requires: mark tests as requiring a specific library",
    "compile: mark placeholder test used to compile integration tests without running them",
    "scheduled: mark tests to run in scheduled testing",
]
asyncio_mode = "auto"
filterwarnings = [ "ignore::langchain_core._api.beta_decorator.LangChainBetaWarning",]
filterwarnings = [
    "ignore::langchain_core._api.beta_decorator.LangChainBetaWarning",
]
@ -1,4 +1,5 @@
import os
from unittest.mock import patch

import pytest

@ -16,3 +17,23 @@ def test_openai_incorrect_field() -> None:
    with pytest.warns(match="not default parameter"):
        llm = OpenAIEmbeddings(foo="bar")  # type: ignore[call-arg]
    assert llm.model_kwargs == {"foo": "bar"}


def test_embed_documents_with_custom_chunk_size() -> None:
    embeddings = OpenAIEmbeddings(chunk_size=2, check_embedding_ctx_length=False)
    texts = ["text1", "text2", "text3", "text4"]
    custom_chunk_size = 3

    with patch.object(embeddings.client, "create") as mock_create:
        mock_create.side_effect = [
            {"data": [{"embedding": [0.1, 0.2]}, {"embedding": [0.3, 0.4]}]},
            {"data": [{"embedding": [0.5, 0.6]}, {"embedding": [0.7, 0.8]}]},
        ]

        result = embeddings.embed_documents(texts, chunk_size=custom_chunk_size)

        mock_create.call_args
        mock_create.assert_any_call(input=texts[0:3], **embeddings._invocation_params)
        mock_create.assert_any_call(input=texts[3:4], **embeddings._invocation_params)

    assert result == [[0.1, 0.2], [0.3, 0.4], [0.5, 0.6], [0.7, 0.8]]
@ -462,8 +462,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -565,19 +565,19 @@ typing = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "openai", specifier = ">=1.58.1,<2.0.0" },
|
||||
{ name = "tiktoken", specifier = ">=0.7,<1" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<2.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
@ -596,7 +596,7 @@ test-integration = [
|
||||
{ name = "pillow", specifier = ">=10.3.0,<11.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-tqdm", specifier = ">=4.66.0.5,<5.0.0.0" },
|
||||
]
|
||||
@ -604,7 +604,7 @@ typing = [
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.11"
|
||||
source = { directory = "../../standard-tests" }
|
||||
source = { editable = "../../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
@ -618,7 +618,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -630,10 +630,10 @@ requires-dist = [
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", directory = "../core" }]
|
||||
test = [{ name = "langchain-core", editable = "../../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
|
@ -4,7 +4,7 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<3.14,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core<1.0.0,>=0.3.34",
|
||||
@ -32,26 +32,17 @@ test = [
|
||||
"pytest-watcher<1.0.0,>=0.3.4",
|
||||
"pytest-asyncio<1,>=0.25.0",
|
||||
"pytest-socket<1.0.0,>=0.7.0",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
]
|
||||
test_integration = [
|
||||
"langchain-openai @ file:///${PROJECT_ROOT}/../openai",
|
||||
]
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
]
|
||||
dev = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
typing = [
|
||||
"mypy<2.0,>=1.10",
|
||||
"simsimd<6.0.0,>=5.0.0",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain-core",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
test_integration = ["langchain-openai"]
|
||||
lint = ["ruff<1.0,>=0.5"]
|
||||
dev = ["langchain-core"]
|
||||
typing = ["mypy<2.0,>=1.10", "simsimd<6.0.0,>=5.0.0", "langchain-core"]
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
langchain-openai = { path = "../openai", editable = true }
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = "True"
|
||||
|
@ -594,8 +594,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -653,7 +653,7 @@ typing = [
|
||||
[[package]]
|
||||
name = "langchain-openai"
|
||||
version = "0.3.5"
|
||||
source = { directory = "../openai" }
|
||||
source = { editable = "../openai" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
{ name = "openai" },
|
||||
@ -662,19 +662,19 @@ dependencies = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "openai", specifier = ">=1.58.1,<2.0.0" },
|
||||
{ name = "tiktoken", specifier = ">=0.7,<1" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<2.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
@ -693,7 +693,7 @@ test-integration = [
|
||||
{ name = "pillow", specifier = ">=10.3.0,<11.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-tqdm", specifier = ">=4.66.0.5,<5.0.0.0" },
|
||||
]
|
||||
@ -742,7 +742,7 @@ typing = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "aiohttp", specifier = ">=3.10,<3.11" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", specifier = ">=0.3.7,<1.0.0" },
|
||||
{ name = "numpy", specifier = ">=1.26.4,<2.0.0" },
|
||||
{ name = "pinecone", specifier = ">=5.4.0,<6.0.0" },
|
||||
@ -750,11 +750,11 @@ requires-dist = [
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.25.0,<1" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
@ -762,9 +762,9 @@ test = [
|
||||
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
|
||||
{ name = "syrupy", specifier = ">=4.0.2,<5.0.0" },
|
||||
]
|
||||
test-integration = [{ name = "langchain-openai", directory = "../openai" }]
|
||||
test-integration = [{ name = "langchain-openai", editable = "../openai" }]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "simsimd", specifier = ">=5.0.0,<6.0.0" },
|
||||
]
|
||||
|
@ -4,12 +4,9 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core<1.0.0,>=0.3.15",
|
||||
"pyyaml<7.0.0,>=6.0.1",
|
||||
]
|
||||
dependencies = ["langchain-core<1.0.0,>=0.3.15", "pyyaml<7.0.0,>=6.0.1"]
|
||||
name = "langchain-prompty"
|
||||
version = "0.1.1"
|
||||
description = "An integration package connecting Prompty and LangChain"
|
||||
@ -28,38 +25,40 @@ test = [
|
||||
"syrupy<5.0.0,>=4.0.2",
|
||||
"pytest-watcher<1.0.0,>=0.3.4",
|
||||
"pytest-asyncio<1.0.0,>=0.21.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain @ file:///${PROJECT_ROOT}/../../langchain",
|
||||
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../../text-splitters",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
"langchain-core",
|
||||
"langchain",
|
||||
"langchain-text-splitters",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
test_integration = []
|
||||
lint = [
|
||||
"ruff<1.0.0,>=0.1.5",
|
||||
]
|
||||
dev = [
|
||||
"types-pyyaml<7.0.0.0,>=6.0.12.20240311",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
]
|
||||
lint = ["ruff<1.0.0,>=0.1.5"]
|
||||
dev = ["types-pyyaml<7.0.0.0,>=6.0.12.20240311", "langchain-core"]
|
||||
typing = [
|
||||
"mypy<1.0,>=0.991",
|
||||
"types-pyyaml<7.0.0.0,>=6.0.12.20240311",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
|
||||
"langchain-core",
|
||||
]
|
||||
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../../core", editable = true }
|
||||
langchain-text-splitters = { path = "../../text-splitters", editable = true }
|
||||
langchain = { path = "../../langchain", editable = true }
|
||||
|
||||
|
||||
[tool.ruff]
|
||||
select = [ "E", "F", "I",]
|
||||
select = ["E", "F", "I"]
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = "True"
|
||||
|
||||
[tool.coverage.run]
|
||||
omit = [ "tests/*",]
|
||||
omit = ["tests/*"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5"
|
||||
markers = [ "requires: mark tests as requiring a specific library", "compile: mark placeholder test used to compile integration tests without running them",]
|
||||
markers = [
|
||||
"requires: mark tests as requiring a specific library",
|
||||
"compile: mark placeholder test used to compile integration tests without running them",
|
||||
]
|
||||
asyncio_mode = "auto"
|
||||
|
@ -576,8 +576,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.3.18rc1"
|
||||
source = { directory = "../../langchain" }
|
||||
version = "0.3.18"
|
||||
source = { editable = "../../langchain" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11'" },
|
||||
@ -601,7 +601,7 @@ requires-dist = [
|
||||
{ name = "langchain-aws", marker = "extra == 'aws'" },
|
||||
{ name = "langchain-cohere", marker = "extra == 'cohere'" },
|
||||
{ name = "langchain-community", marker = "extra == 'community'" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-deepseek", marker = "extra == 'deepseek'" },
|
||||
{ name = "langchain-fireworks", marker = "extra == 'fireworks'" },
|
||||
{ name = "langchain-google-genai", marker = "extra == 'google-genai'" },
|
||||
@ -610,8 +610,8 @@ requires-dist = [
|
||||
{ name = "langchain-huggingface", marker = "extra == 'huggingface'" },
|
||||
{ name = "langchain-mistralai", marker = "extra == 'mistralai'" },
|
||||
{ name = "langchain-ollama", marker = "extra == 'ollama'" },
|
||||
{ name = "langchain-openai", marker = "extra == 'openai'" },
|
||||
{ name = "langchain-text-splitters", specifier = ">=0.3.3,<1.0.0" },
|
||||
{ name = "langchain-openai", marker = "extra == 'openai'", editable = "../openai" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "langchain-together", marker = "extra == 'together'" },
|
||||
{ name = "langsmith", specifier = ">=0.1.17,<0.4" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.26.4,<2" },
|
||||
@ -627,8 +627,8 @@ requires-dist = [
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "playwright", specifier = ">=1.28.0,<2.0.0" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
@ -638,14 +638,15 @@ lint = [
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "blockbuster", specifier = ">=1.5.14,<1.6" },
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "duckdb-engine", specifier = ">=0.9.2,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-openai", directory = "../partners/openai" },
|
||||
{ name = "langchain-tests", directory = "../standard-tests" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-openai", editable = "../openai" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "lark", specifier = ">=1.1.5,<2.0.0" },
|
||||
{ name = "packaging", specifier = ">=24.2" },
|
||||
{ name = "pandas", specifier = ">=2.0.0,<3.0.0" },
|
||||
@ -664,8 +665,8 @@ test = [
|
||||
]
|
||||
test-integration = [
|
||||
{ name = "cassio", specifier = ">=0.1.0,<1.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "langchainhub", specifier = ">=0.1.16,<1.0.0" },
|
||||
{ name = "pytest-vcr", specifier = ">=1.0.2,<2.0.0" },
|
||||
{ name = "python-dotenv", specifier = ">=1.0.0,<2.0.0" },
|
||||
@ -673,8 +674,8 @@ test-integration = [
|
||||
{ name = "wrapt", specifier = ">=1.15.0,<2.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "mypy-protobuf", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "types-chardet", specifier = ">=5.0.4.6,<6.0.0.0" },
|
||||
@ -687,8 +688,8 @@ typing = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -782,22 +783,22 @@ typing = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "langchain-core", specifier = ">=0.3.15,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "pyyaml", specifier = ">=6.0.1,<7.0.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "types-pyyaml", specifier = ">=6.0.12.20240311,<7.0.0.0" },
|
||||
]
|
||||
lint = [{ name = "ruff", specifier = ">=0.1.5,<1.0.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain", directory = "../../langchain" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../../text-splitters" },
|
||||
{ name = "langchain", editable = "../../langchain" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-text-splitters", editable = "../../text-splitters" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
@ -806,34 +807,34 @@ test = [
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=0.991,<1.0" },
|
||||
{ name = "types-pyyaml", specifier = ">=6.0.12.20240311,<7.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-text-splitters"
|
||||
version = "0.3.6rc1"
|
||||
source = { directory = "../../text-splitters" }
|
||||
version = "0.3.6"
|
||||
source = { editable = "../../text-splitters" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "langchain-core", specifier = ">=0.3.34rc1,<1.0.0" }]
|
||||
requires-dist = [{ name = "langchain-core", editable = "../../core" }]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
]
|
||||
lint = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
|
@ -4,7 +4,7 @@ build-backend = "pdm.backend"

[project]
authors = []
license = {text = "MIT"}
license = { text = "MIT" }
requires-python = "<4,>=3.9"
dependencies = [
"qdrant-client<2.0.0,>=1.10.1",
@ -36,27 +36,16 @@ test = [
"pytest-asyncio<1.0.0,>=0.21.1",
"requests<3.0.0,>=2.31.0",
"pytest-socket<1.0.0,>=0.7.0",
"langchain-core @ file:///${PROJECT_ROOT}/../../core ; python_version >= \"3.9\"",
"langchain-core<0.3,>=0.1.40; python_version < \"3.9\"",
]
codespell = [
"codespell<3.0.0,>=2.2.0",
"langchain-core",
]
codespell = ["codespell<3.0.0,>=2.2.0"]
test_integration = []
lint = [
"ruff<1.0,>=0.5",
]
dev = [
"langchain-core @ file:///${PROJECT_ROOT}/../../core ; python_version >= \"3.9\"",
"langchain-core<0.3,>=0.1.52; python_version < \"3.9\"",
]
typing = [
"mypy<2.0,>=1.10",
"simsimd<7.0.0,>=6.0.0",
"langchain-core @ file:///${PROJECT_ROOT}/../../core ; python_version >= \"3.9\"",
"langchain-core<0.3,>=0.1.52; python_version < \"3.9\"",
]
lint = ["ruff<1.0,>=0.5"]
dev = ["langchain-core"]
typing = ["mypy<2.0,>=1.10", "simsimd<7.0.0,>=6.0.0", "langchain-core"]

[tool.uv.sources]
langchain-core = { path = "../../core", editable = true }

[tool.ruff]
select = ["E", "F", "I"]

@ -540,8 +540,8 @@ wheels = [

[[package]]
name = "langchain-core"
version = "0.3.34rc1"
source = { directory = "../../core" }
version = "0.3.35"
source = { editable = "../../core" }
dependencies = [
{ name = "jsonpatch" },
{ name = "langsmith" },
@ -641,22 +641,18 @@ typing = [
[package.metadata]
requires-dist = [
{ name = "fastembed", marker = "python_full_version >= '3.9' and python_full_version < '3.13' and extra == 'fastembed'", specifier = ">=0.3.3,<1.0.0" },
{ name = "langchain-core", specifier = ">=0.2.43,!=0.3.0,!=0.3.1,!=0.3.2,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,<0.4.0" },
{ name = "langchain-core", editable = "../../core" },
{ name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
{ name = "qdrant-client", specifier = ">=1.10.1,<2.0.0" },
]

[package.metadata.requires-dev]
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
dev = [
{ name = "langchain-core", marker = "python_full_version < '3.9'", specifier = ">=0.1.52,<0.3" },
{ name = "langchain-core", marker = "python_full_version >= '3.9'", directory = "../../core" },
]
dev = [{ name = "langchain-core", editable = "../../core" }]
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
test = [
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
{ name = "langchain-core", marker = "python_full_version < '3.9'", specifier = ">=0.1.40,<0.3" },
{ name = "langchain-core", marker = "python_full_version >= '3.9'", directory = "../../core" },
{ name = "langchain-core", editable = "../../core" },
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
@ -667,8 +663,7 @@ test = [
]
test-integration = []
typing = [
{ name = "langchain-core", marker = "python_full_version < '3.9'", specifier = ">=0.1.52,<0.3" },
{ name = "langchain-core", marker = "python_full_version >= '3.9'", directory = "../../core" },
{ name = "langchain-core", editable = "../../core" },
{ name = "mypy", specifier = ">=1.10,<2.0" },
{ name = "simsimd", specifier = ">=6.0.0,<7.0.0" },
]

@ -7,7 +7,7 @@ includes = []

[project]
authors = []
license = {text = "MIT"}
license = { text = "MIT" }
requires-python = "<3.13,>=3.9"
dependencies = [
"langchain-core<1.0.0,>=0.3.29",
@ -35,34 +35,30 @@ test = [
"pytest-socket<1.0.0,>=0.7.0",
"numpy<2.0.0,>=1.24.0; python_version < \"3.12\"",
"numpy<2.0.0,>=1.26.0; python_version >= \"3.12\"",
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
]
codespell = [
"codespell<3.0.0,>=2.2.0",
"langchain-core",
]
codespell = ["codespell<3.0.0,>=2.2.0"]
test_integration = []
lint = [
"ruff<1.0.0,>=0.1.5",
]
dev = [
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
]
typing = [
"mypy<2.0,>=1.10",
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
]
lint = ["ruff<1.0.0,>=0.1.5"]
dev = ["langchain-core"]
typing = ["mypy<2.0,>=1.10", "langchain-core"]

[tool.uv.sources]
langchain-core = { path = "../../core", editable = true }

[tool.mypy]
disallow_untyped_defs = "True"

[tool.ruff.lint]
select = [ "E", "F", "I",]
select = ["E", "F", "I"]

[tool.coverage.run]
omit = [ "tests/*",]
omit = ["tests/*"]

[tool.pytest.ini_options]
addopts = "--strict-markers --strict-config --durations=5"
markers = [ "requires: mark tests as requiring a specific library", "compile: mark placeholder test used to compile integration tests without running them",]
markers = [
"requires: mark tests as requiring a specific library",
"compile: mark placeholder test used to compile integration tests without running them",
]
asyncio_mode = "auto"

@ -506,8 +506,8 @@ wheels = [

[[package]]
name = "langchain-core"
version = "0.3.34rc1"
source = { directory = "../../core" }
version = "0.3.35"
source = { editable = "../../core" }
dependencies = [
{ name = "jsonpatch" },
{ name = "langsmith" },
@ -600,18 +600,18 @@ typing = [

[package.metadata]
requires-dist = [
{ name = "langchain-core", specifier = ">=0.3.29,<1.0.0" },
{ name = "langchain-core", editable = "../../core" },
{ name = "pydantic", specifier = ">=2,<3" },
{ name = "voyageai", specifier = ">=0.3.2,<1" },
]

[package.metadata.requires-dev]
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
dev = [{ name = "langchain-core", directory = "../../core" }]
dev = [{ name = "langchain-core", editable = "../../core" }]
lint = [{ name = "ruff", specifier = ">=0.1.5,<1.0.0" }]
test = [
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
{ name = "langchain-core", directory = "../../core" },
{ name = "langchain-core", editable = "../../core" },
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<2.0.0" },
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
@ -623,7 +623,7 @@ test = [
]
test-integration = []
typing = [
{ name = "langchain-core", directory = "../../core" },
{ name = "langchain-core", editable = "../../core" },
{ name = "mypy", specifier = ">=1.10,<2.0" },
]

@ -4,7 +4,7 @@ build-backend = "pdm.backend"

[project]
authors = []
license = {text = "MIT"}
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
"langchain-openai<0.4,>=0.3",
@ -31,30 +31,24 @@ test = [
"pytest-watcher<1.0.0,>=0.3.4",
"pytest-asyncio<1.0.0,>=0.21.1",
"docarray<1.0.0,>=0.32.1",
"langchain-openai @ file:///${PROJECT_ROOT}/../openai",
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
"langchain-tests @ file:///${PROJECT_ROOT}/../../standard-tests",
"langchain-openai",
"langchain-core",
"langchain-tests",
"pytest-socket<1.0.0,>=0.7.0",
]
codespell = [
"codespell<3.0.0,>=2.2.0",
]
codespell = ["codespell<3.0.0,>=2.2.0"]
test_integration = [
"numpy<2,>=1; python_version < \"3.12\"",
"numpy<2.0.0,>=1.26.0; python_version >= \"3.12\"",
]
lint = [
"ruff<1.0,>=0.5",
]
typing = [
"mypy<2.0,>=1.10",
"types-requests<3,>=2",
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
]
dev = [
"langchain-core @ file:///${PROJECT_ROOT}/../../core",
]
lint = ["ruff<1.0,>=0.5"]
typing = ["mypy<2.0,>=1.10", "types-requests<3,>=2", "langchain-core"]
dev = ["langchain-core"]

[tool.uv.sources]
langchain-core = { path = "../../core", editable = true }
langchain-tests = { path = "../../standard-tests", editable = true }
langchain-openai = { path = "../openai", editable = true }

[tool.mypy]
disallow_untyped_defs = "True"

@ -612,8 +612,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "../../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -670,8 +670,8 @@ typing = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-openai"
|
||||
version = "0.3.3"
|
||||
source = { directory = "../openai" }
|
||||
version = "0.3.5"
|
||||
source = { editable = "../openai" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
{ name = "openai" },
|
||||
@ -680,19 +680,19 @@ dependencies = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "openai", specifier = ">=1.58.1,<2.0.0" },
|
||||
{ name = "tiktoken", specifier = ">=0.7,<1" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<2.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
@ -711,15 +711,15 @@ test-integration = [
|
||||
{ name = "pillow", specifier = ">=10.3.0,<11.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-tqdm", specifier = ">=4.66.0.5,<5.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.10"
|
||||
source = { directory = "../../standard-tests" }
|
||||
version = "0.3.11"
|
||||
source = { editable = "../../standard-tests" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
@ -733,7 +733,7 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<0.4.0" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -742,6 +742,16 @@ requires-dist = [
|
||||
{ name = "syrupy", specifier = ">=4,<5" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", editable = "../../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-xai"
|
||||
version = "0.2.0"
|
||||
@ -788,21 +798,21 @@ typing = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "aiohttp", specifier = ">=3.9.1,<4" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.29,<1.0.0" },
|
||||
{ name = "langchain-openai", specifier = ">=0.3,<0.4" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-openai", editable = "../openai" },
|
||||
{ name = "requests", specifier = ">=2,<3" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "../../core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "docarray", specifier = ">=0.32.1,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-openai", directory = "../openai" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "langchain-openai", editable = "../openai" },
|
||||
{ name = "langchain-tests", editable = "../../standard-tests" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
@ -815,7 +825,7 @@ test-integration = [
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<2.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "../../core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-requests", specifier = ">=2,<3" },
|
||||
]
|
||||
|
@ -1122,7 +1122,7 @@ class ChatModelIntegrationTests(ChatModelTests):
model_with_tools = model.bind_tools(
[magic_function_no_args], tool_choice=tool_choice
)
query = "What is the value of magic_function()? Use the tool."
query = "What is the value of magic_function_no_args()? Use the tool."
result = model_with_tools.invoke(query)
_validate_tool_call_message_no_args(result)

@ -3,13 +3,11 @@ requires = ["pdm-backend"]
build-backend = "pdm.backend"

[project]
authors = [
{name = "Erick Friis", email = "erick@langchain.dev"},
]
license = {text = "MIT"}
authors = [{ name = "Erick Friis", email = "erick@langchain.dev" }]
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
"langchain-core<1.0.0,>=0.3.34",
"langchain-core<1.0.0,>=0.3.35",
"pytest<9,>=7",
"pytest-asyncio<1,>=0.20",
"httpx<1,>=0.25.0",
@ -19,7 +17,7 @@ dependencies = [
"numpy<3,>=1.26.2; python_version >= \"3.12\"",
]
name = "langchain-tests"
version = "0.3.11"
version = "0.3.12"
description = "Standard tests for LangChain implementations"
readme = "README.md"

@ -28,21 +26,14 @@ readme = "README.md"
repository = "https://github.com/langchain-ai/langchain"

[dependency-groups]
test = [
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
test = ["langchain-core"]
test_integration = []
codespell = [
"codespell<3.0.0,>=2.2.0",
]
lint = [
"ruff<1.0.0,>=0.9.2",
]
typing = [
"mypy<2,>=1",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
codespell = ["codespell<3.0.0,>=2.2.0"]
lint = ["ruff<1.0.0,>=0.9.2"]
typing = ["mypy<2,>=1", "langchain-core"]

[tool.uv.sources]
langchain-core = { path = "../core", editable = true }

[tool.mypy]
disallow_untyped_defs = "True"

@ -287,8 +287,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34"
|
||||
source = { directory = "../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -345,13 +345,13 @@ typing = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-tests"
|
||||
version = "0.3.11"
|
||||
version = "0.3.12"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "langchain-core" },
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
|
||||
{ name = "numpy", version = "2.2.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "numpy", version = "2.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-asyncio" },
|
||||
{ name = "pytest-socket" },
|
||||
@ -376,7 +376,7 @@ typing = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
{ name = "pytest", specifier = ">=7,<9" },
|
||||
@ -388,16 +388,16 @@ requires-dist = [
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
|
||||
test = [{ name = "langchain-core", directory = "../core" }]
|
||||
test = [{ name = "langchain-core", editable = "../core" }]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "mypy", specifier = ">=1,<2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.3.5"
|
||||
version = "0.3.8"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
@ -407,9 +407,9 @@ dependencies = [
|
||||
{ name = "requests-toolbelt" },
|
||||
{ name = "zstandard" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b7/2c/d8acbc61896f5fc210a3f3bc8ddf39db5213b23eaf83a755ba334be30212/langsmith-0.3.5.tar.gz", hash = "sha256:d891a205f70ab0b2c26311db6c52486ffc9fc1124238b999619445f6ae900725", size = 321847 }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/1a/974b66a9e7c43f41bec067e1f393a296803aee48fafcf183941c31295b59/langsmith-0.3.8.tar.gz", hash = "sha256:97f9bebe0b7cb0a4f278e6ff30ae7d5ededff3883b014442ec6d7d575b02a0f1", size = 321394 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/00/dbbb9df2c575217326021da731534f246dce4bb6e95b55432ff7191643ac/langsmith-0.3.5-py3-none-any.whl", hash = "sha256:29da924d2e3662dd56f96d179ebc06662b66dd0b2317362ccebe0de1b78750e7", size = 333276 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/e4/5380e8229c442e406404977d2ec71a9db6a3e6a89fce7791c6ad7cd2bdbe/langsmith-0.3.8-py3-none-any.whl", hash = "sha256:fbb9dd97b0f090219447fca9362698d07abaeda1da85aa7cc6ec6517b36581b1", size = 332800 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -513,68 +513,68 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "2.2.2"
|
||||
version = "2.2.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.12.4'",
|
||||
"python_full_version >= '3.12' and python_full_version < '3.12.4'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ec/d0/c12ddfd3a02274be06ffc71f3efc6d0e457b0409c4481596881e748cb264/numpy-2.2.2.tar.gz", hash = "sha256:ed6906f61834d687738d25988ae117683705636936cc605be0bb208b23df4d8f", size = 20233295 }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fb/90/8956572f5c4ae52201fdec7ba2044b2c882832dcec7d5d0922c9e9acf2de/numpy-2.2.3.tar.gz", hash = "sha256:dbdc15f0c81611925f382dfa97b3bd0bc2c1ce19d4fe50482cb0ddc12ba30020", size = 20262700 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/70/2a/69033dc22d981ad21325314f8357438078f5c28310a6d89fb3833030ec8a/numpy-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7079129b64cb78bdc8d611d1fd7e8002c0a2565da6a47c4df8062349fee90e3e", size = 21215825 },
|
||||
{ url = "https://files.pythonhosted.org/packages/31/2c/39f91e00bbd3d5639b027ac48c55dc5f2992bd2b305412d26be4c830862a/numpy-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec6c689c61df613b783aeb21f945c4cbe6c51c28cb70aae8430577ab39f163e", size = 14354996 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/2c/d468ebd253851af10de5b3e8f3418ebabfaab5f0337a75299fbeb8b8c17a/numpy-2.2.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:40c7ff5da22cd391944a28c6a9c638a5eef77fcf71d6e3a79e1d9d9e82752715", size = 5393621 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/f4/3d8a5a0da297034106c5de92be881aca7079cde6058934215a1de91334f6/numpy-2.2.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:995f9e8181723852ca458e22de5d9b7d3ba4da3f11cc1cb113f093b271d7965a", size = 6928931 },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/a7/029354ab56edd43dd3f5efbfad292b8844f98b93174f322f82353fa46efa/numpy-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b78ea78450fd96a498f50ee096f69c75379af5138f7881a51355ab0e11286c97", size = 14333157 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/d7/11fc594838d35c43519763310c316d4fd56f8600d3fc80a8e13e325b5c5c/numpy-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fbe72d347fbc59f94124125e73fc4976a06927ebc503ec5afbfb35f193cd957", size = 16381794 },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/d4/dd9b19cd4aff9c79d3f54d17f8be815407520d3116004bc574948336981b/numpy-2.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8e6da5cffbbe571f93588f562ed130ea63ee206d12851b60819512dd3e1ba50d", size = 15543990 },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/97/ab96b7650f27f684a9b1e46757a7294ecc50cab27701d05f146e9f779627/numpy-2.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:09d6a2032faf25e8d0cadde7fd6145118ac55d2740132c1d845f98721b5ebcfd", size = 18170896 },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/9b/bae9618cab20db67a2ca9d711795cad29b2ca4b73034dd3b5d05b962070a/numpy-2.2.2-cp310-cp310-win32.whl", hash = "sha256:159ff6ee4c4a36a23fe01b7c3d07bd8c14cc433d9720f977fcd52c13c0098160", size = 6573458 },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/9b/95678092febd14070cfb7906ea7932e71e9dd5a6ab3ee948f9ed975e905d/numpy-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:64bd6e1762cd7f0986a740fee4dff927b9ec2c5e4d9a28d056eb17d332158014", size = 12915812 },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/67/32c68756eed84df181c06528ff57e09138f893c4653448c4967311e0f992/numpy-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:642199e98af1bd2b6aeb8ecf726972d238c9877b0f6e8221ee5ab945ec8a2189", size = 21220002 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/89/f43bcad18f2b2e5814457b1c7f7b0e671d0db12c8c0e43397ab8cb1831ed/numpy-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6d9fc9d812c81e6168b6d405bf00b8d6739a7f72ef22a9214c4241e0dc70b323", size = 14391215 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/e6/efb8cd6122bf25e86e3dd89d9dbfec9e6861c50e8810eed77d4be59b51c6/numpy-2.2.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c7d1fd447e33ee20c1f33f2c8e6634211124a9aabde3c617687d8b739aa69eac", size = 5391918 },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/e2/fccf89d64d9b47ffb242823d4e851fc9d36fa751908c9aac2807924d9b4e/numpy-2.2.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:451e854cfae0febe723077bd0cf0a4302a5d84ff25f0bfece8f29206c7bed02e", size = 6933133 },
|
||||
{ url = "https://files.pythonhosted.org/packages/34/22/5ece749c0e5420a9380eef6fbf83d16a50010bd18fef77b9193d80a6760e/numpy-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd249bc894af67cbd8bad2c22e7cbcd46cf87ddfca1f1289d1e7e54868cc785c", size = 14338187 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/86/caec78829311f62afa6fa334c8dfcd79cffb4d24bcf96ee02ae4840d462b/numpy-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02935e2c3c0c6cbe9c7955a8efa8908dd4221d7755644c59d1bba28b94fd334f", size = 16393429 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/4e/0c25f74c88239a37924577d6ad780f3212a50f4b4b5f54f5e8c918d726bd/numpy-2.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a972cec723e0563aa0823ee2ab1df0cb196ed0778f173b381c871a03719d4826", size = 15559103 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/bd/d557f10fa50dc4d5871fb9606af563249b66af2fc6f99041a10e8757c6f1/numpy-2.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d6d6a0910c3b4368d89dde073e630882cdb266755565155bc33520283b2d9df8", size = 18182967 },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/e9/66cc0f66386d78ed89e45a56e2a1d051e177b6e04477c4a41cd590ef4017/numpy-2.2.2-cp311-cp311-win32.whl", hash = "sha256:860fd59990c37c3ef913c3ae390b3929d005243acca1a86facb0773e2d8d9e50", size = 6571499 },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/a3/4139296b481ae7304a43581046b8f0a20da6a0dfe0ee47a044cade796603/numpy-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:da1eeb460ecce8d5b8608826595c777728cdf28ce7b5a5a8c8ac8d949beadcf2", size = 12919805 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/e6/847d15770ab7a01e807bdfcd4ead5bdae57c0092b7dc83878171b6af97bb/numpy-2.2.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ac9bea18d6d58a995fac1b2cb4488e17eceeac413af014b1dd26170b766d8467", size = 20912636 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/af/f83580891577b13bd7e261416120e036d0d8fb508c8a43a73e38928b794b/numpy-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23ae9f0c2d889b7b2d88a3791f6c09e2ef827c2446f1c4a3e3e76328ee4afd9a", size = 14098403 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/86/d019fb60a9d0f1d4cf04b014fe88a9135090adfadcc31c1fadbb071d7fa7/numpy-2.2.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:3074634ea4d6df66be04f6728ee1d173cfded75d002c75fac79503a880bf3825", size = 5128938 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/1b/50985edb6f1ec495a1c36452e860476f5b7ecdc3fc59ea89ccad3c4926c5/numpy-2.2.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ec0636d3f7d68520afc6ac2dc4b8341ddb725039de042faf0e311599f54eb37", size = 6661937 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/1b/17efd94cad1b9d605c3f8907fb06bcffc4ce4d1d14d46b95316cccccf2b9/numpy-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ffbb1acd69fdf8e89dd60ef6182ca90a743620957afb7066385a7bbe88dc748", size = 14049518 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/73/65d2f0b698df1731e851e3295eb29a5ab8aa06f763f7e4188647a809578d/numpy-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0349b025e15ea9d05c3d63f9657707a4e1d471128a3b1d876c095f328f8ff7f0", size = 16099146 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/69/308f55c0e19d4b5057b5df286c5433822e3c8039ede06d4051d96f1c2c4e/numpy-2.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:463247edcee4a5537841d5350bc87fe8e92d7dd0e8c71c995d2c6eecb8208278", size = 15246336 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/d8/d8d333ad0d8518d077a21aeea7b7c826eff766a2b1ce1194dea95ca0bacf/numpy-2.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9dd47ff0cb2a656ad69c38da850df3454da88ee9a6fde0ba79acceee0e79daba", size = 17863507 },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/6e/0b84ad3103ffc16d6673e63b5acbe7901b2af96c2837174c6318c98e27ab/numpy-2.2.2-cp312-cp312-win32.whl", hash = "sha256:4525b88c11906d5ab1b0ec1f290996c0020dd318af8b49acaa46f198b1ffc283", size = 6276491 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/84/7f801a42a67b9772a883223a0a1e12069a14626c81a732bd70aac57aebc1/numpy-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:5acea83b801e98541619af398cc0109ff48016955cc0818f478ee9ef1c5c3dcb", size = 12616372 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/fe/df5624001f4f5c3e0b78e9017bfab7fdc18a8d3b3d3161da3d64924dd659/numpy-2.2.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b208cfd4f5fe34e1535c08983a1a6803fdbc7a1e86cf13dd0c61de0b51a0aadc", size = 20899188 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/80/d349c3b5ed66bd3cb0214be60c27e32b90a506946857b866838adbe84040/numpy-2.2.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d0bbe7dd86dca64854f4b6ce2ea5c60b51e36dfd597300057cf473d3615f2369", size = 14113972 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/50/949ec9cbb28c4b751edfa64503f0913cbfa8d795b4a251e7980f13a8a655/numpy-2.2.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:22ea3bb552ade325530e72a0c557cdf2dea8914d3a5e1fecf58fa5dbcc6f43cd", size = 5114294 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/f3/399c15629d5a0c68ef2aa7621d430b2be22034f01dd7f3c65a9c9666c445/numpy-2.2.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:128c41c085cab8a85dc29e66ed88c05613dccf6bc28b3866cd16050a2f5448be", size = 6648426 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/03/c72474c13772e30e1bc2e558cdffd9123c7872b731263d5648b5c49dd459/numpy-2.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:250c16b277e3b809ac20d1f590716597481061b514223c7badb7a0f9993c7f84", size = 14045990 },
|
||||
{ url = "https://files.pythonhosted.org/packages/83/9c/96a9ab62274ffafb023f8ee08c88d3d31ee74ca58869f859db6845494fa6/numpy-2.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0c8854b09bc4de7b041148d8550d3bd712b5c21ff6a8ed308085f190235d7ff", size = 16096614 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/34/cd0a735534c29bec7093544b3a509febc9b0df77718a9b41ffb0809c9f46/numpy-2.2.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b6fb9c32a91ec32a689ec6410def76443e3c750e7cfc3fb2206b985ffb2b85f0", size = 15242123 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/6d/541717a554a8f56fa75e91886d9b79ade2e595918690eb5d0d3dbd3accb9/numpy-2.2.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:57b4012e04cc12b78590a334907e01b3a85efb2107df2b8733ff1ed05fce71de", size = 17859160 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/a5/fbf1f2b54adab31510728edd06a05c1b30839f37cf8c9747cb85831aaf1b/numpy-2.2.2-cp313-cp313-win32.whl", hash = "sha256:4dbd80e453bd34bd003b16bd802fac70ad76bd463f81f0c518d1245b1c55e3d9", size = 6273337 },
|
||||
{ url = "https://files.pythonhosted.org/packages/56/e5/01106b9291ef1d680f82bc47d0c5b5e26dfed15b0754928e8f856c82c881/numpy-2.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:5a8c863ceacae696aff37d1fd636121f1a512117652e5dfb86031c8d84836369", size = 12609010 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/30/f23d9876de0f08dceb707c4dcf7f8dd7588266745029debb12a3cdd40be6/numpy-2.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b3482cb7b3325faa5f6bc179649406058253d91ceda359c104dac0ad320e1391", size = 20924451 },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/ec/6ea85b2da9d5dfa1dbb4cb3c76587fc8ddcae580cb1262303ab21c0926c4/numpy-2.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9491100aba630910489c1d0158034e1c9a6546f0b1340f716d522dc103788e39", size = 14122390 },
|
||||
{ url = "https://files.pythonhosted.org/packages/68/05/bfbdf490414a7dbaf65b10c78bc243f312c4553234b6d91c94eb7c4b53c2/numpy-2.2.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:41184c416143defa34cc8eb9d070b0a5ba4f13a0fa96a709e20584638254b317", size = 5156590 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/ec/fe2e91b2642b9d6544518388a441bcd65c904cea38d9ff998e2e8ebf808e/numpy-2.2.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:7dca87ca328f5ea7dafc907c5ec100d187911f94825f8700caac0b3f4c384b49", size = 6671958 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/6f/6531a78e182f194d33ee17e59d67d03d0d5a1ce7f6be7343787828d1bd4a/numpy-2.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bc61b307655d1a7f9f4b043628b9f2b721e80839914ede634e3d485913e1fb2", size = 14019950 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/fb/13c58591d0b6294a08cc40fcc6b9552d239d773d520858ae27f39997f2ae/numpy-2.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fad446ad0bc886855ddf5909cbf8cb5d0faa637aaa6277fb4b19ade134ab3c7", size = 16079759 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/f2/f2f8edd62abb4b289f65a7f6d1f3650273af00b91b7267a2431be7f1aec6/numpy-2.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:149d1113ac15005652e8d0d3f6fd599360e1a708a4f98e43c9c77834a28238cb", size = 15226139 },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/29/14a177f1a90b8ad8a592ca32124ac06af5eff32889874e53a308f850290f/numpy-2.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:106397dbbb1896f99e044efc90360d098b3335060375c26aa89c0d8a97c5f648", size = 17856316 },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/03/242ae8d7b97f4e0e4ab8dd51231465fb23ed5e802680d629149722e3faf1/numpy-2.2.2-cp313-cp313t-win32.whl", hash = "sha256:0eec19f8af947a61e968d5429f0bd92fec46d92b0008d0a6685b40d6adf8a4f4", size = 6329134 },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/94/cd9e9b04012c015cb6320ab3bf43bc615e248dddfeb163728e800a5d96f0/numpy-2.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:97b974d3ba0fb4612b77ed35d7627490e8e3dff56ab41454d9e8b23448940576", size = 12696208 },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/7e/1dd770ee68916ed358991ab62c2cc353ffd98d0b75b901d52183ca28e8bb/numpy-2.2.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b0531f0b0e07643eb089df4c509d30d72c9ef40defa53e41363eca8a8cc61495", size = 21047291 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/3c/ccd08578dc532a8e6927952339d4a02682b776d5e85be49ed0760308433e/numpy-2.2.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:e9e82dcb3f2ebbc8cb5ce1102d5f1c5ed236bf8a11730fb45ba82e2841ec21df", size = 6792494 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/28/8754b9aee4f97199f9a047f73bb644b5a2014994a6d7b061ba67134a42de/numpy-2.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d4142eb40ca6f94539e4db929410f2a46052a0fe7a2c1c59f6179c39938d2a", size = 16197312 },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/96/deb93f871f401045a684ca08a009382b247d14996d7a94fea6aa43c67b94/numpy-2.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:356ca982c188acbfa6af0d694284d8cf20e95b1c3d0aefa8929376fea9146f60", size = 12822674 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/e1/1816d5d527fa870b260a1c2c5904d060caad7515637bd54f495a5ce13ccd/numpy-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cbc6472e01952d3d1b2772b720428f8b90e2deea8344e854df22b0618e9cce71", size = 21232911 },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/46/9f25dc19b359f10c0e52b6bac25d3181eb1f4b4d04c9846a32cf5ea52762/numpy-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdfe0c22692a30cd830c0755746473ae66c4a8f2e7bd508b35fb3b6a0813d787", size = 14371955 },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/d7/de941296e6b09a5c81d3664ad912f1496a0ecdd2f403318e5e35604ff70f/numpy-2.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:e37242f5324ffd9f7ba5acf96d774f9276aa62a966c0bad8dae692deebec7716", size = 5410476 },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/ce/55f685995110f8a268fdca0f198c9a84fa87b39512830965cc1087af6391/numpy-2.2.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:95172a21038c9b423e68be78fd0be6e1b97674cde269b76fe269a5dfa6fadf0b", size = 6945730 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/84/abdb9f6e22576d89c259401c3234d4755b322539491bbcffadc8bcb120d3/numpy-2.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b47c440210c5d1d67e1cf434124e0b5c395eee1f5806fdd89b553ed1acd0a3", size = 14350752 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/88/3870cfa9bef4dffb3a326507f430e6007eeac258ebeef6b76fc542aef66d/numpy-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0391ea3622f5c51a2e29708877d56e3d276827ac5447d7f45e9bc4ade8923c52", size = 16399386 },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/10/3f629682dd0b457525c131945329c4e81e2dadeb11256e6ce4c9a1a6fb41/numpy-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f6b3dfc7661f8842babd8ea07e9897fe3d9b69a1d7e5fbb743e4160f9387833b", size = 15561826 },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/18/fd35673ba9751eba449d4ce5d24d94e3b612cdbfba79348da71488c0b7ac/numpy-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1ad78ce7f18ce4e7df1b2ea4019b5817a2f6a8a16e34ff2775f646adce0a5027", size = 18188593 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/4c/c0f897b580ea59484b4cc96a441fea50333b26675a60a1421bc912268b5f/numpy-2.2.3-cp310-cp310-win32.whl", hash = "sha256:5ebeb7ef54a7be11044c33a17b2624abe4307a75893c001a4800857956b41094", size = 6590421 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/5b/aaabbfc7060c5c8f0124c5deb5e114a3b413a548bbc64e372c5b5db36165/numpy-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:596140185c7fa113563c67c2e894eabe0daea18cf8e33851738c19f70ce86aeb", size = 12925667 },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/86/453aa3949eab6ff54e2405f9cb0c01f756f031c3dc2a6d60a1d40cba5488/numpy-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:16372619ee728ed67a2a606a614f56d3eabc5b86f8b615c79d01957062826ca8", size = 21237256 },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/c3/93ecceadf3e155d6a9e4464dd2392d8d80cf436084c714dc8535121c83e8/numpy-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5521a06a3148686d9269c53b09f7d399a5725c47bbb5b35747e1cb76326b714b", size = 14408049 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/29/076999b69bd9264b8df5e56f2be18da2de6b2a2d0e10737e5307592e01de/numpy-2.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:7c8dde0ca2f77828815fd1aedfdf52e59071a5bae30dac3b4da2a335c672149a", size = 5408655 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/a7/b14f0a73eb0fe77cb9bd5b44534c183b23d4229c099e339c522724b02678/numpy-2.2.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:77974aba6c1bc26e3c205c2214f0d5b4305bdc719268b93e768ddb17e3fdd636", size = 6949996 },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/2f/8063da0616bb0f414b66dccead503bd96e33e43685c820e78a61a214c098/numpy-2.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d42f9c36d06440e34226e8bd65ff065ca0963aeecada587b937011efa02cdc9d", size = 14355789 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/d7/3cd47b00b8ea95ab358c376cf5602ad21871410950bc754cf3284771f8b6/numpy-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2712c5179f40af9ddc8f6727f2bd910ea0eb50206daea75f58ddd9fa3f715bb", size = 16411356 },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/c0/a2379e202acbb70b85b41483a422c1e697ff7eee74db642ca478de4ba89f/numpy-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c8b0451d2ec95010d1db8ca733afc41f659f425b7f608af569711097fd6014e2", size = 15576770 },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/63/a13ee650f27b7999e5b9e1964ae942af50bb25606d088df4229283eda779/numpy-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9b4a8148c57ecac25a16b0e11798cbe88edf5237b0df99973687dd866f05e1b", size = 18200483 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/87/e71f89935e09e8161ac9c590c82f66d2321eb163893a94af749dfa8a3cf8/numpy-2.2.3-cp311-cp311-win32.whl", hash = "sha256:1f45315b2dc58d8a3e7754fe4e38b6fce132dab284a92851e41b2b344f6441c5", size = 6588415 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/c6/cd4298729826af9979c5f9ab02fcaa344b82621e7c49322cd2d210483d3f/numpy-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f48ba6f6c13e5e49f3d3efb1b51c8193215c42ac82610a04624906a9270be6f", size = 12929604 },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/ec/43628dcf98466e087812142eec6d1c1a6c6bdfdad30a0aa07b872dc01f6f/numpy-2.2.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12c045f43b1d2915eca6b880a7f4a256f59d62df4f044788c8ba67709412128d", size = 20929458 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/c0/2f4225073e99a5c12350954949ed19b5d4a738f541d33e6f7439e33e98e4/numpy-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:87eed225fd415bbae787f93a457af7f5990b92a334e346f72070bf569b9c9c95", size = 14115299 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/fa/d2c5575d9c734a7376cc1592fae50257ec95d061b27ee3dbdb0b3b551eb2/numpy-2.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:712a64103d97c404e87d4d7c47fb0c7ff9acccc625ca2002848e0d53288b90ea", size = 5145723 },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/dc/023dad5b268a7895e58e791f28dc1c60eb7b6c06fcbc2af8538ad069d5f3/numpy-2.2.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a5ae282abe60a2db0fd407072aff4599c279bcd6e9a2475500fc35b00a57c532", size = 6678797 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/19/bcd641ccf19ac25abb6fb1dcd7744840c11f9d62519d7057b6ab2096eb60/numpy-2.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5266de33d4c3420973cf9ae3b98b54a2a6d53a559310e3236c4b2b06b9c07d4e", size = 14067362 },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/04/78d2e7402fb479d893953fb78fa7045f7deb635ec095b6b4f0260223091a/numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b787adbf04b0db1967798dba8da1af07e387908ed1553a0d6e74c084d1ceafe", size = 16116679 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/a1/e90f7aa66512be3150cb9d27f3d9995db330ad1b2046474a13b7040dfd92/numpy-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:34c1b7e83f94f3b564b35f480f5652a47007dd91f7c839f404d03279cc8dd021", size = 15264272 },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/b6/50bd027cca494de4fa1fc7bf1662983d0ba5f256fa0ece2c376b5eb9b3f0/numpy-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d8335b5f1b6e2bce120d55fb17064b0262ff29b459e8493d1785c18ae2553b8", size = 17880549 },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/30/f7bf4acb5f8db10a96f73896bdeed7a63373137b131ca18bd3dab889db3b/numpy-2.2.3-cp312-cp312-win32.whl", hash = "sha256:4d9828d25fb246bedd31e04c9e75714a4087211ac348cb39c8c5f99dbb6683fe", size = 6293394 },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/6e/55580a538116d16ae7c9aa17d4edd56e83f42126cb1dfe7a684da7925d2c/numpy-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:83807d445817326b4bcdaaaf8e8e9f1753da04341eceec705c001ff342002e5d", size = 12626357 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/8b/88b98ed534d6a03ba8cddb316950fe80842885709b58501233c29dfa24a9/numpy-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bfdb06b395385ea9b91bf55c1adf1b297c9fdb531552845ff1d3ea6e40d5aba", size = 20916001 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/b4/def6ec32c725cc5fbd8bdf8af80f616acf075fe752d8a23e895da8c67b70/numpy-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:23c9f4edbf4c065fddb10a4f6e8b6a244342d95966a48820c614891e5059bb50", size = 14130721 },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/60/70af0acc86495b25b672d403e12cb25448d79a2b9658f4fc45e845c397a8/numpy-2.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:a0c03b6be48aaf92525cccf393265e02773be8fd9551a2f9adbe7db1fa2b60f1", size = 5130999 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/69/d96c006fb73c9a47bcb3611417cf178049aae159afae47c48bd66df9c536/numpy-2.2.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:2376e317111daa0a6739e50f7ee2a6353f768489102308b0d98fcf4a04f7f3b5", size = 6665299 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/3f/d8a877b6e48103733ac224ffa26b30887dc9944ff95dffdfa6c4ce3d7df3/numpy-2.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fb62fe3d206d72fe1cfe31c4a1106ad2b136fcc1606093aeab314f02930fdf2", size = 14064096 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/43/619c2c7a0665aafc80efca465ddb1f260287266bdbdce517396f2f145d49/numpy-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52659ad2534427dffcc36aac76bebdd02b67e3b7a619ac67543bc9bfe6b7cdb1", size = 16114758 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/79/ee4fe4f60967ccd3897aa71ae14cdee9e3c097e3256975cc9575d393cb42/numpy-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1b416af7d0ed3271cad0f0a0d0bee0911ed7eba23e66f8424d9f3dfcdcae1304", size = 15259880 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/c8/8b55cf05db6d85b7a7d414b3d1bd5a740706df00bfa0824a08bf041e52ee/numpy-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1402da8e0f435991983d0a9708b779f95a8c98c6b18a171b9f1be09005e64d9d", size = 17876721 },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/d6/b4c2f0564b7dcc413117b0ffbb818d837e4b29996b9234e38b2025ed24e7/numpy-2.2.3-cp313-cp313-win32.whl", hash = "sha256:136553f123ee2951bfcfbc264acd34a2fc2f29d7cdf610ce7daf672b6fbaa693", size = 6290195 },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/e7/7d55a86719d0de7a6a597949f3febefb1009435b79ba510ff32f05a8c1d7/numpy-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5b732c8beef1d7bc2d9e476dbba20aaff6167bf205ad9aa8d30913859e82884b", size = 12619013 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/1f/0b863d5528b9048fd486a56e0b97c18bf705e88736c8cea7239012119a54/numpy-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:435e7a933b9fda8126130b046975a968cc2d833b505475e588339e09f7672890", size = 20944621 },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/99/b478c384f7a0a2e0736177aafc97dc9152fc036a3fdb13f5a3ab225f1494/numpy-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7678556eeb0152cbd1522b684dcd215250885993dd00adb93679ec3c0e6e091c", size = 14142502 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/61/2d9a694a0f9cd0a839501d362de2a18de75e3004576a3008e56bdd60fcdb/numpy-2.2.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2e8da03bd561504d9b20e7a12340870dfc206c64ea59b4cfee9fceb95070ee94", size = 5176293 },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/35/51e94011b23e753fa33f891f601e5c1c9a3d515448659b06df9d40c0aa6e/numpy-2.2.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:c9aa4496fd0e17e3843399f533d62857cef5900facf93e735ef65aa4bbc90ef0", size = 6691874 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/cf/06e37619aad98a9d03bd8d65b8e3041c3a639be0f5f6b0a0e2da544538d4/numpy-2.2.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4ca91d61a4bf61b0f2228f24bbfa6a9facd5f8af03759fe2a655c50ae2c6610", size = 14036826 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/93/5d7d19955abd4d6099ef4a8ee006f9ce258166c38af259f9e5558a172e3e/numpy-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deaa09cd492e24fd9b15296844c0ad1b3c976da7907e1c1ed3a0ad21dded6f76", size = 16096567 },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/53/d1c599acf7732d81f46a93621dab6aa8daad914b502a7a115b3f17288ab2/numpy-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:246535e2f7496b7ac85deffe932896a3577be7af8fb7eebe7146444680297e9a", size = 15242514 },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/43/c0f5411c7b3ea90adf341d05ace762dad8cb9819ef26093e27b15dd121ac/numpy-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:daf43a3d1ea699402c5a850e5313680ac355b4adc9770cd5cfc2940e7861f1bf", size = 17872920 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/57/6dbdd45ab277aff62021cafa1e15f9644a52f5b5fc840bc7591b4079fb58/numpy-2.2.3-cp313-cp313t-win32.whl", hash = "sha256:cf802eef1f0134afb81fef94020351be4fe1d6681aadf9c5e862af6602af64ef", size = 6346584 },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/9b/484f7d04b537d0a1202a5ba81c6f53f1846ae6c63c2127f8df869ed31342/numpy-2.2.3-cp313-cp313t-win_amd64.whl", hash = "sha256:aee2512827ceb6d7f517c8b85aa5d3923afe8fc7a57d028cffcd522f1c6fd082", size = 12706784 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/b5/a7839f5478be8f859cb880f13d90fcfe4b0ec7a9ebaff2bcc30d96760596/numpy-2.2.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3c2ec8a0f51d60f1e9c0c5ab116b7fc104b165ada3f6c58abf881cb2eb16044d", size = 21064244 },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/e8/5da32ffcaa7a72f7ecd82f90c062140a061eb823cb88e90279424e515cf4/numpy-2.2.3-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ed2cf9ed4e8ebc3b754d398cba12f24359f018b416c380f577bbae112ca52fc9", size = 6809418 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/a9/68aa7076c7656a7308a0f73d0a2ced8c03f282c9fd98fa7ce21c12634087/numpy-2.2.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39261798d208c3095ae4f7bc8eaeb3481ea8c6e03dc48028057d3cbdbdb8937e", size = 16215461 },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/7f/d322a4125405920401450118dbdc52e0384026bd669939484670ce8b2ab9/numpy-2.2.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:783145835458e60fa97afac25d511d00a1eca94d4a8f3ace9fe2043003c678e4", size = 12839607 },
|
||||
]

[[package]]
@ -911,27 +911,27 @@ wheels = [

[[package]]
name = "ruff"
version = "0.9.4"
version = "0.9.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c0/17/529e78f49fc6f8076f50d985edd9a2cf011d1dbadb1cdeacc1d12afc1d26/ruff-0.9.4.tar.gz", hash = "sha256:6907ee3529244bb0ed066683e075f09285b38dd5b4039370df6ff06041ca19e7", size = 3599458 }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/2a/e1/e265aba384343dd8ddd3083f5e33536cd17e1566c41453a5517b5dd443be/ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9", size = 3639454 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/f8/3fafb7804d82e0699a122101b5bee5f0d6e17c3a806dcbc527bb7d3f5b7a/ruff-0.9.4-py3-none-linux_armv6l.whl", hash = "sha256:64e73d25b954f71ff100bb70f39f1ee09e880728efb4250c632ceed4e4cdf706", size = 11668400 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/a6/2efa772d335da48a70ab2c6bb41a096c8517ca43c086ea672d51079e3d1f/ruff-0.9.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6ce6743ed64d9afab4fafeaea70d3631b4d4b28b592db21a5c2d1f0ef52934bf", size = 11628395 },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/d7/cd822437561082f1c9d7225cc0d0fbb4bad117ad7ac3c41cd5d7f0fa948c/ruff-0.9.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:54499fb08408e32b57360f6f9de7157a5fec24ad79cb3f42ef2c3f3f728dfe2b", size = 11090052 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/67/3660d58e893d470abb9a13f679223368ff1684a4ef40f254a0157f51b448/ruff-0.9.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37c892540108314a6f01f105040b5106aeb829fa5fb0561d2dcaf71485021137", size = 11882221 },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/d1/757559995c8ba5f14dfec4459ef2dd3fcea82ac43bc4e7c7bf47484180c0/ruff-0.9.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de9edf2ce4b9ddf43fd93e20ef635a900e25f622f87ed6e3047a664d0e8f810e", size = 11424862 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/96/7915a7c6877bb734caa6a2af424045baf6419f685632469643dbd8eb2958/ruff-0.9.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87c90c32357c74f11deb7fbb065126d91771b207bf9bfaaee01277ca59b574ec", size = 12626735 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/cc/dadb9b35473d7cb17c7ffe4737b4377aeec519a446ee8514123ff4a26091/ruff-0.9.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56acd6c694da3695a7461cc55775f3a409c3815ac467279dfa126061d84b314b", size = 13255976 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/c3/ad2dd59d3cabbc12df308cced780f9c14367f0321e7800ca0fe52849da4c/ruff-0.9.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0c93e7d47ed951b9394cf352d6695b31498e68fd5782d6cbc282425655f687a", size = 12752262 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/17/5f1971e54bd71604da6788efd84d66d789362b1105e17e5ccc53bba0289b/ruff-0.9.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4c8772670aecf037d1bf7a07c39106574d143b26cfe5ed1787d2f31e800214", size = 14401648 },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/24/6200b13ea611b83260501b6955b764bb320e23b2b75884c60ee7d3f0b68e/ruff-0.9.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfc5f1d7afeda8d5d37660eeca6d389b142d7f2b5a1ab659d9214ebd0e025231", size = 12414702 },
|
||||
{ url = "https://files.pythonhosted.org/packages/34/cb/f5d50d0c4ecdcc7670e348bd0b11878154bc4617f3fdd1e8ad5297c0d0ba/ruff-0.9.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:faa935fc00ae854d8b638c16a5f1ce881bc3f67446957dd6f2af440a5fc8526b", size = 11859608 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/f4/9c8499ae8426da48363bbb78d081b817b0f64a9305f9b7f87eab2a8fb2c1/ruff-0.9.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a6c634fc6f5a0ceae1ab3e13c58183978185d131a29c425e4eaa9f40afe1e6d6", size = 11485702 },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/59/30490e483e804ccaa8147dd78c52e44ff96e1c30b5a95d69a63163cdb15b/ruff-0.9.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:433dedf6ddfdec7f1ac7575ec1eb9844fa60c4c8c2f8887a070672b8d353d34c", size = 12067782 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/8c/893fa9551760b2f8eb2a351b603e96f15af167ceaf27e27ad873570bc04c/ruff-0.9.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d612dbd0f3a919a8cc1d12037168bfa536862066808960e0cc901404b77968f0", size = 12483087 },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/15/f6751c07c21ca10e3f4a51ea495ca975ad936d780c347d9808bcedbd7182/ruff-0.9.4-py3-none-win32.whl", hash = "sha256:db1192ddda2200671f9ef61d9597fcef89d934f5d1705e571a93a67fb13a4402", size = 9852302 },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/41/2d2d2c6a72e62566f730e49254f602dfed23019c33b5b21ea8f8917315a1/ruff-0.9.4-py3-none-win_amd64.whl", hash = "sha256:05bebf4cdbe3ef75430d26c375773978950bbf4ee3c95ccb5448940dc092408e", size = 10850051 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/e6/3d6ec3bc3d254e7f005c543a661a41c3e788976d0e52a1ada195bd664344/ruff-0.9.4-py3-none-win_arm64.whl", hash = "sha256:585792f1e81509e38ac5123492f8875fbc36f3ede8185af0a26df348e5154f41", size = 10078251 },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/e3/3d2c022e687e18cf5d93d6bfa2722d46afc64eaa438c7fbbdd603b3597be/ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba", size = 11714128 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/22/aff073b70f95c052e5c58153cba735748c9e70107a77d03420d7850710a0/ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504", size = 11682539 },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/a7/f5b7390afd98a7918582a3d256cd3e78ba0a26165a467c1820084587cbf9/ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83", size = 11132512 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/e3/45de13ef65047fea2e33f7e573d848206e15c715e5cd56095589a7733d04/ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc", size = 11929275 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/f2/23d04cd6c43b2e641ab961ade8d0b5edb212ecebd112506188c91f2a6e6c/ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b", size = 11466502 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/6f/3a8cf166f2d7f1627dd2201e6cbc4cb81f8b7d58099348f0c1ff7b733792/ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e", size = 12676364 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/c4/db52e2189983c70114ff2b7e3997e48c8318af44fe83e1ce9517570a50c6/ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666", size = 13335518 },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/44/545f8a4d136830f08f4d24324e7db957c5374bf3a3f7a6c0bc7be4623a37/ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5", size = 12823287 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/26/8208ef9ee7431032c143649a9967c3ae1aae4257d95e6f8519f07309aa66/ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5", size = 14592374 },
|
||||
{ url = "https://files.pythonhosted.org/packages/31/70/e917781e55ff39c5b5208bda384fd397ffd76605e68544d71a7e40944945/ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217", size = 12500173 },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/f5/e4ddee07660f5a9622a9c2b639afd8f3104988dc4f6ba0b73ffacffa9a8c/ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6", size = 11906555 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/2b/6ff2fe383667075eef8656b9892e73dd9b119b5e3add51298628b87f6429/ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897", size = 11538958 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/db/98e59e90de45d1eb46649151c10a062d5707b5b7f76f64eb1e29edf6ebb1/ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08", size = 12117247 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/bc/54e38f6d219013a9204a5a2015c09e7a8c36cedcd50a4b01ac69a550b9d9/ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656", size = 12554647 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/7d/7b461ab0e2404293c0627125bb70ac642c2e8d55bf590f6fce85f508f1b2/ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d", size = 9949214 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/30/c3cee10f915ed75a5c29c1e57311282d1a15855551a64795c1b2bbe5cf37/ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa", size = 10999914 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/a8/d71f44b93e3aa86ae232af1f2126ca7b95c0f515ec135462b3e1f351441c/ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a", size = 10177499 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4,11 +4,9 @@ build-backend = "pdm.backend"
|
||||
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core<1.0.0,>=0.3.34",
|
||||
]
|
||||
dependencies = ["langchain-core<1.0.0,>=0.3.34"]
|
||||
name = "langchain-text-splitters"
|
||||
version = "0.3.6"
|
||||
description = "LangChain text splitting utilities"
|
||||
@ -20,20 +18,14 @@ readme = "README.md"
|
||||
repository = "https://github.com/langchain-ai/langchain"
|
||||
|
||||
[dependency-groups]
|
||||
lint = [
|
||||
"ruff<1.0.0,>=0.9.2",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
]
|
||||
lint = ["ruff<1.0.0,>=0.9.2", "langchain-core"]
|
||||
typing = [
|
||||
"mypy<2.0,>=1.10",
|
||||
"lxml-stubs<1.0.0,>=0.5.1",
|
||||
"types-requests<3.0.0.0,>=2.31.0.20240218",
|
||||
"tiktoken<1.0.0,>=0.8.0",
|
||||
]
|
||||
dev = [
|
||||
"jupyter<2.0.0,>=1.0.0",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
]
|
||||
dev = ["jupyter<2.0.0,>=1.0.0", "langchain-core"]
|
||||
test = [
|
||||
"pytest<9,>=8",
|
||||
"freezegun<2.0.0,>=1.2.2",
|
||||
@ -42,7 +34,7 @@ test = [
|
||||
"pytest-asyncio<1.0.0,>=0.21.1",
|
||||
"pytest-socket<1.0.0,>=0.7.0",
|
||||
"pytest-xdist<4.0.0,>=3.6.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain-core",
|
||||
]
|
||||
test_integration = [
|
||||
"spacy<3.8.4,>=3.0.0; python_version < \"3.10.0\"",
|
||||
@ -52,6 +44,8 @@ test_integration = [
|
||||
"sentence-transformers>=2.6.0; python_version < \"3.13\"",
|
||||
]
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "../core", editable = true }
|
||||
|
||||
[tool.mypy]
|
||||
disallow_untyped_defs = "True"
|
||||
|
@ -1079,8 +1079,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34"
|
||||
source = { directory = "../core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "../core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -1177,20 +1177,20 @@ typing = [
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "langchain-core", specifier = ">=0.3.34,<1.0.0" }]
|
||||
requires-dist = [{ name = "langchain-core", editable = "../core" }]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
]
|
||||
lint = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "../core" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
|
@ -1,6 +1,6 @@
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
license = { text = "MIT" }
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = []
|
||||
name = "langchain-monorepo"
|
||||
@ -12,33 +12,29 @@ readme = "README.md"
|
||||
repository = "https://www.github.com/langchain-ai/langchain"
|
||||
|
||||
[dependency-groups]
|
||||
lint = [
|
||||
"ruff<1.0.0,>=0.5.0",
|
||||
]
|
||||
lint = ["ruff<1.0.0,>=0.5.0"]
|
||||
dev = [
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/libs/core",
|
||||
"langchain-text-splitters @ file:///${PROJECT_ROOT}/libs/text-splitters",
|
||||
"langchain-community @ file:///${PROJECT_ROOT}/libs/community",
|
||||
"langchain @ file:///${PROJECT_ROOT}/libs/langchain",
|
||||
"langchain-openai @ file:///${PROJECT_ROOT}/libs/partners/openai",
|
||||
"langchain-core",
|
||||
"langchain-text-splitters",
|
||||
"langchain-community",
|
||||
"langchain",
|
||||
"langchain-openai",
|
||||
"ipykernel<7.0.0,>=6.29.2",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
]
|
||||
codespell = ["codespell<3.0.0,>=2.2.0"]
|
||||
typing = []
|
||||
test = [
|
||||
"langchain-experimental @ git+https://github.com/langchain-ai/langchain-experimental.git#subdirectory=libs/experimental",
|
||||
"langchain-anthropic @ file:///${PROJECT_ROOT}/libs/partners/anthropic",
|
||||
"langchain-anthropic",
|
||||
"langchain-aws @ git+https://github.com/langchain-ai/langchain-aws.git#subdirectory=libs/aws",
|
||||
"langchain-chroma @ file:///${PROJECT_ROOT}/libs/partners/chroma",
|
||||
"langchain-fireworks @ file:///${PROJECT_ROOT}/libs/partners/fireworks",
|
||||
"langchain-chroma",
|
||||
"langchain-fireworks",
|
||||
"langchain-google-vertexai @ git+https://github.com/langchain-ai/langchain-google.git#subdirectory=libs/vertexai",
|
||||
"langchain-groq @ file:///${PROJECT_ROOT}/libs/partners/groq",
|
||||
"langchain-mistralai @ file:///${PROJECT_ROOT}/libs/partners/mistralai",
|
||||
"langchain-groq",
|
||||
"langchain-mistralai",
|
||||
"langchain-together @ git+https://github.com/langchain-ai/langchain-together.git#subdirectory=libs/together",
|
||||
"langchain-unstructured @ git+https://github.com/langchain-ai/langchain-unstructured.git#subdirectory=libs/unstructured",
|
||||
"langgraph @ git+https://github.com/langchain-ai/langgraph.git#subdirectory=libs/langgraph",
|
||||
"langgraph",
|
||||
"jupyter<2.0.0,>=1.1.1",
|
||||
"click<9.0.0,>=8.1.7",
|
||||
"aiofiles<25.0.0,>=24.1.0",
|
||||
@ -54,6 +50,20 @@ test = [
|
||||
"vcrpy<7.0.0,>=6.0.1",
|
||||
]
|
||||
|
||||
|
||||
[tool.uv.sources]
|
||||
langchain-core = { path = "./libs/core", editable = true }
|
||||
langchain-text-splitters = { path = "./libs/text-splitters", editable = true }
|
||||
langchain-tests = { path = "./libs/standard-tests", editable = true }
|
||||
langchain-community = { path = "./libs/community", editable = true }
|
||||
langchain = { path = "./libs/langchain", editable = true }
|
||||
langchain-openai = { path = "./libs/partners/openai", editable = true }
|
||||
langchain-anthropic = { path = "./libs/partners/anthropic", editable = true }
|
||||
langchain-chroma = { path = "./libs/partners/chroma", editable = true }
|
||||
langchain-fireworks = { path = "./libs/partners/fireworks", editable = true }
|
||||
langchain-groq = { path = "./libs/partners/groq", editable = true }
|
||||
langchain-mistralai = { path = "./libs/partners/mistralai", editable = true }
|
||||
|
||||
[build-system]
|
||||
requires = ["pdm-backend"]
|
||||
build-backend = "pdm.backend"
|
||||
@ -84,9 +94,9 @@ pydocstyle = { convention = "google" }
|
||||
"F401", # allow "imported but unused" example code
|
||||
"F811", # allow re-importing the same module, so that cells can stay independent
|
||||
"F841", # allow assignments to variables that are never read -- it's example code
|
||||
|
||||
|
||||
]
|
||||
"!libs/langchain/langchain/model_laboratory.py"=["D"]
|
||||
"!libs/langchain/langchain/model_laboratory.py" = ["D"]
|
||||
|
||||
# These files were failing the listed rules at the time ruff was adopted for notebooks.
|
||||
# Don't require them to change at once, though we should look into them eventually.
|
||||
|
187
uv.lock
187
uv.lock
@ -2152,8 +2152,8 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.3.18rc1"
|
||||
source = { directory = "libs/langchain" }
|
||||
version = "0.3.18"
|
||||
source = { editable = "libs/langchain" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11'" },
|
||||
@ -2176,7 +2176,7 @@ requires-dist = [
|
||||
{ name = "langchain-aws", marker = "extra == 'aws'" },
|
||||
{ name = "langchain-cohere", marker = "extra == 'cohere'" },
|
||||
{ name = "langchain-community", marker = "extra == 'community'" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-deepseek", marker = "extra == 'deepseek'" },
|
||||
{ name = "langchain-fireworks", marker = "extra == 'fireworks'" },
|
||||
{ name = "langchain-google-genai", marker = "extra == 'google-genai'" },
|
||||
@ -2185,8 +2185,8 @@ requires-dist = [
|
||||
{ name = "langchain-huggingface", marker = "extra == 'huggingface'" },
|
||||
{ name = "langchain-mistralai", marker = "extra == 'mistralai'" },
|
||||
{ name = "langchain-ollama", marker = "extra == 'ollama'" },
|
||||
{ name = "langchain-openai", marker = "extra == 'openai'" },
|
||||
{ name = "langchain-text-splitters", specifier = ">=0.3.3,<1.0.0" },
|
||||
{ name = "langchain-openai", marker = "extra == 'openai'", editable = "libs/partners/openai" },
|
||||
{ name = "langchain-text-splitters", editable = "libs/text-splitters" },
|
||||
{ name = "langchain-together", marker = "extra == 'together'" },
|
||||
{ name = "langsmith", specifier = ">=0.1.17,<0.4" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.26.4,<2" },
|
||||
@ -2202,8 +2202,8 @@ requires-dist = [
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-text-splitters", editable = "libs/text-splitters" },
|
||||
{ name = "playwright", specifier = ">=1.28.0,<2.0.0" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
@ -2213,14 +2213,15 @@ lint = [
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "blockbuster", specifier = ">=1.5.14,<1.6" },
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "duckdb-engine", specifier = ">=0.9.2,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-openai", directory = "../partners/openai" },
|
||||
{ name = "langchain-tests", directory = "../standard-tests" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-openai", editable = "libs/partners/openai" },
|
||||
{ name = "langchain-tests", editable = "libs/standard-tests" },
|
||||
{ name = "langchain-text-splitters", editable = "libs/text-splitters" },
|
||||
{ name = "lark", specifier = ">=1.1.5,<2.0.0" },
|
||||
{ name = "packaging", specifier = ">=24.2" },
|
||||
{ name = "pandas", specifier = ">=2.0.0,<3.0.0" },
|
||||
@ -2239,8 +2240,8 @@ test = [
|
||||
]
|
||||
test-integration = [
|
||||
{ name = "cassio", specifier = ">=0.1.0,<1.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-text-splitters", editable = "libs/text-splitters" },
|
||||
{ name = "langchainhub", specifier = ">=0.1.16,<1.0.0" },
|
||||
{ name = "pytest-vcr", specifier = ">=1.0.2,<2.0.0" },
|
||||
{ name = "python-dotenv", specifier = ">=1.0.0,<2.0.0" },
|
||||
@ -2248,8 +2249,8 @@ test-integration = [
|
||||
{ name = "wrapt", specifier = ">=1.15.0,<2.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-text-splitters", editable = "libs/text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "mypy-protobuf", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "types-chardet", specifier = ">=5.0.4.6,<6.0.0.0" },
|
||||
@ -2262,8 +2263,8 @@ typing = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-anthropic"
|
||||
version = "0.3.6"
|
||||
source = { directory = "libs/partners/anthropic" }
|
||||
version = "0.3.7"
|
||||
source = { editable = "libs/partners/anthropic" }
|
||||
dependencies = [
|
||||
{ name = "anthropic" },
|
||||
{ name = "langchain-core" },
|
||||
@ -2273,19 +2274,19 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "anthropic", specifier = ">=0.45.0,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "libs/core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "defusedxml", specifier = ">=0.7.1,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-tests", editable = "libs/standard-tests" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
@ -2295,11 +2296,11 @@ test = [
|
||||
{ name = "syrupy", specifier = ">=4.0.2,<5.0.0" },
|
||||
]
|
||||
test-integration = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "requests", specifier = ">=2.32.3,<3.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-requests", specifier = ">=2.32.0.20241016,<3.0.0.0" },
|
||||
]
|
||||
@ -2317,8 +2318,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-chroma"
|
||||
version = "0.2.1"
|
||||
source = { directory = "libs/partners/chroma" }
|
||||
version = "0.2.2"
|
||||
source = { editable = "libs/partners/chroma" }
|
||||
dependencies = [
|
||||
{ name = "chromadb" },
|
||||
{ name = "langchain-core" },
|
||||
@ -2328,17 +2329,14 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "chromadb", specifier = ">=0.4.0,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,!=0.5.10,!=0.5.11,!=0.5.12,<0.7.0" },
|
||||
{ name = "langchain-core", specifier = ">=0.2.43,!=0.3.0,!=0.3.1,!=0.3.2,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,<0.4.0" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.22.4,<2.0.0" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<2.0.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "langchain-core", marker = "python_full_version < '3.9'", specifier = ">=0.1.40,<0.3" },
|
||||
{ name = "langchain-core", marker = "python_full_version >= '3.9'", directory = "../../core" },
|
||||
]
|
||||
dev = [{ name = "langchain-core", editable = "libs/core" }]
|
||||
lint = [
|
||||
{ name = "onnxruntime", marker = "python_full_version < '3.10'", specifier = "<1.20" },
|
||||
{ name = "onnxruntime", marker = "python_full_version >= '3.10'" },
|
||||
@ -2346,9 +2344,8 @@ lint = [
|
||||
]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", marker = "python_full_version < '3.9'", specifier = ">=0.1.40,<0.3" },
|
||||
{ name = "langchain-core", marker = "python_full_version >= '3.9'", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-tests", editable = "libs/standard-tests" },
|
||||
{ name = "onnxruntime", marker = "python_full_version < '3.10'", specifier = "<1.20" },
|
||||
{ name = "onnxruntime", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
@ -2360,16 +2357,15 @@ test = [
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", marker = "python_full_version < '3.9'", specifier = ">=0.1.40,<0.3" },
|
||||
{ name = "langchain-core", marker = "python_full_version >= '3.9'", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-requests", specifier = ">=2.31.0.20240406,<3.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-community"
|
||||
version = "0.3.17rc1"
|
||||
source = { directory = "libs/community" }
|
||||
version = "0.3.17"
|
||||
source = { editable = "libs/community" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "dataclasses-json" },
|
||||
@ -2390,8 +2386,8 @@ requires-dist = [
|
||||
{ name = "aiohttp", specifier = ">=3.8.3,<4.0.0" },
|
||||
{ name = "dataclasses-json", specifier = ">=0.5.7,<0.7" },
|
||||
{ name = "httpx-sse", specifier = ">=0.4.0,<1.0.0" },
|
||||
{ name = "langchain", specifier = ">=0.3.18rc1,<1.0.0" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.34rc1,<1.0.0" },
|
||||
{ name = "langchain", editable = "libs/langchain" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langsmith", specifier = ">=0.1.125,<0.4" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.26.4,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.2,<3" },
|
||||
@ -2406,22 +2402,23 @@ requires-dist = [
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
|
||||
]
|
||||
lint = [
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "ruff", specifier = ">=0.5,<0.6" },
|
||||
{ name = "ruff", specifier = ">=0.9,<0.10" },
|
||||
]
|
||||
test = [
|
||||
{ name = "blockbuster", specifier = ">=1.5.13,<1.6" },
|
||||
{ name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.10'" },
|
||||
{ name = "duckdb-engine", specifier = ">=0.13.6,<1.0.0" },
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain", directory = "." },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-tests", directory = "../standard-tests" },
|
||||
{ name = "langchain", editable = "libs/langchain" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-tests", editable = "libs/standard-tests" },
|
||||
{ name = "lark", specifier = ">=1.1.5,<2.0.0" },
|
||||
{ name = "pandas", specifier = ">=2.0.0,<3.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.4.4,<8.0.0" },
|
||||
@ -2442,9 +2439,9 @@ test-integration = [
|
||||
{ name = "vcrpy", specifier = ">=6,<7" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain", directory = "." },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-text-splitters", directory = "../text-splitters" },
|
||||
{ name = "langchain", editable = "libs/langchain" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-text-splitters", editable = "libs/text-splitters" },
|
||||
{ name = "mypy", specifier = ">=1.12,<2.0" },
|
||||
{ name = "mypy-protobuf", specifier = ">=3.0.0,<4.0.0" },
|
||||
{ name = "types-chardet", specifier = ">=5.0.4.6,<6.0.0.0" },
|
||||
@ -2457,8 +2454,8 @@ typing = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.34rc1"
|
||||
source = { directory = "libs/core" }
|
||||
version = "0.3.35"
|
||||
source = { editable = "libs/core" }
|
||||
dependencies = [
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "langsmith" },
|
||||
@ -2525,7 +2522,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "langchain-fireworks"
|
||||
version = "0.2.7"
|
||||
source = { directory = "libs/partners/fireworks" }
|
||||
source = { editable = "libs/partners/fireworks" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "fireworks-ai" },
|
||||
@ -2538,19 +2535,19 @@ dependencies = [
|
||||
requires-dist = [
|
||||
{ name = "aiohttp", specifier = ">=3.9.1,<4.0.0" },
|
||||
{ name = "fireworks-ai", specifier = ">=0.13.0" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "openai", specifier = ">=1.10.0,<2.0.0" },
|
||||
{ name = "requests", specifier = ">=2,<3" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "libs/core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-tests", editable = "libs/standard-tests" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
@ -2560,7 +2557,7 @@ test = [
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-requests", specifier = ">=2,<3" },
|
||||
]
|
||||
@ -2581,7 +2578,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "langchain-groq"
|
||||
version = "0.2.4"
|
||||
source = { directory = "libs/partners/groq" }
|
||||
source = { editable = "libs/partners/groq" }
|
||||
dependencies = [
|
||||
{ name = "groq" },
|
||||
{ name = "langchain-core" },
|
||||
@ -2590,31 +2587,31 @@ dependencies = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "groq", specifier = ">=0.4.1,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "libs/core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-tests", editable = "libs/standard-tests" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
|
||||
]
|
||||
test-integration = [{ name = "langchain-core", directory = "../../core" }]
|
||||
test-integration = [{ name = "langchain-core", editable = "libs/core" }]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-mistralai"
|
||||
version = "0.2.6"
|
||||
source = { directory = "libs/partners/mistralai" }
|
||||
source = { editable = "libs/partners/mistralai" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "httpx-sse" },
|
||||
@ -2627,25 +2624,25 @@ dependencies = [
|
||||
requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.25.2,<1" },
|
||||
{ name = "httpx-sse", specifier = ">=0.3.1,<1" },
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "pydantic", specifier = ">=2,<3" },
|
||||
{ name = "tokenizers", specifier = ">=0.15.1,<1" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "libs/core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-tests", editable = "libs/standard-tests" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
|
||||
]
|
||||
test-integration = []
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
]
|
||||
|
||||
@ -2702,11 +2699,11 @@ test = [
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [
|
||||
{ name = "ipykernel", specifier = ">=6.29.2,<7.0.0" },
|
||||
{ name = "langchain", directory = "libs/langchain" },
|
||||
{ name = "langchain-community", directory = "libs/community" },
|
||||
{ name = "langchain-core", directory = "libs/core" },
|
||||
{ name = "langchain-openai", directory = "libs/partners/openai" },
|
||||
{ name = "langchain-text-splitters", directory = "libs/text-splitters" },
|
||||
{ name = "langchain", editable = "libs/langchain" },
|
||||
{ name = "langchain-community", editable = "libs/community" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-openai", editable = "libs/partners/openai" },
|
||||
{ name = "langchain-text-splitters", editable = "libs/text-splitters" },
|
||||
]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5.0,<1.0.0" }]
|
||||
test = [
|
||||
@ -2715,17 +2712,17 @@ test = [
|
||||
{ name = "faiss-cpu", specifier = ">=1.7.4,<2.0.0" },
|
||||
{ name = "grandalf", specifier = ">=0.8,<1.0" },
|
||||
{ name = "jupyter", specifier = ">=1.1.1,<2.0.0" },
|
||||
{ name = "langchain-anthropic", directory = "libs/partners/anthropic" },
|
||||
{ name = "langchain-anthropic", editable = "libs/partners/anthropic" },
|
||||
{ name = "langchain-aws", git = "https://github.com/langchain-ai/langchain-aws.git?subdirectory=libs%2Faws" },
|
||||
{ name = "langchain-chroma", directory = "libs/partners/chroma" },
|
||||
{ name = "langchain-chroma", editable = "libs/partners/chroma" },
|
||||
{ name = "langchain-experimental", git = "https://github.com/langchain-ai/langchain-experimental.git?subdirectory=libs%2Fexperimental" },
|
||||
{ name = "langchain-fireworks", directory = "libs/partners/fireworks" },
|
||||
{ name = "langchain-fireworks", editable = "libs/partners/fireworks" },
|
||||
{ name = "langchain-google-vertexai", git = "https://github.com/langchain-ai/langchain-google.git?subdirectory=libs%2Fvertexai" },
|
||||
{ name = "langchain-groq", directory = "libs/partners/groq" },
|
||||
{ name = "langchain-mistralai", directory = "libs/partners/mistralai" },
|
||||
{ name = "langchain-groq", editable = "libs/partners/groq" },
|
||||
{ name = "langchain-mistralai", editable = "libs/partners/mistralai" },
|
||||
{ name = "langchain-together", git = "https://github.com/langchain-ai/langchain-together.git?subdirectory=libs%2Ftogether" },
|
||||
{ name = "langchain-unstructured", git = "https://github.com/langchain-ai/langchain-unstructured.git?subdirectory=libs%2Funstructured" },
|
||||
{ name = "langgraph", git = "https://github.com/langchain-ai/langgraph.git?subdirectory=libs%2Flanggraph" },
|
||||
{ name = "langgraph" },
|
||||
{ name = "lark", specifier = ">=1.1.9,<2.0.0" },
|
||||
{ name = "pandas", specifier = ">=2,<3" },
|
||||
{ name = "pypdf", specifier = ">=5.0.0,<6.0.0" },
|
||||
@ -2739,8 +2736,8 @@ typing = []
|
||||
|
||||
[[package]]
|
||||
name = "langchain-openai"
|
||||
version = "0.3.3"
|
||||
source = { directory = "libs/partners/openai" }
|
||||
version = "0.3.5"
|
||||
source = { editable = "libs/partners/openai" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
{ name = "openai" },
|
||||
@ -2749,19 +2746,19 @@ dependencies = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "langchain-core", specifier = ">=0.3.33,<1.0.0" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "openai", specifier = ">=1.58.1,<2.0.0" },
|
||||
{ name = "tiktoken", specifier = ">=0.7,<1" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
|
||||
dev = [{ name = "langchain-core", directory = "../../core" }]
|
||||
dev = [{ name = "langchain-core", editable = "libs/core" }]
|
||||
lint = [{ name = "ruff", specifier = ">=0.5,<1.0" }]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-tests", directory = "../../standard-tests" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "langchain-tests", editable = "libs/standard-tests" },
|
||||
{ name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1,<2" },
|
||||
{ name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<2.0.0" },
|
||||
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
|
||||
@ -2780,34 +2777,34 @@ test-integration = [
|
||||
{ name = "pillow", specifier = ">=10.3.0,<11.0.0" },
|
||||
]
|
||||
typing = [
|
||||
{ name = "langchain-core", directory = "../../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "mypy", specifier = ">=1.10,<2.0" },
|
||||
{ name = "types-tqdm", specifier = ">=4.66.0.5,<5.0.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-text-splitters"
|
||||
version = "0.3.6rc1"
|
||||
source = { directory = "libs/text-splitters" }
|
||||
version = "0.3.6"
|
||||
source = { editable = "libs/text-splitters" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "langchain-core", specifier = ">=0.3.34rc1,<1.0.0" }]
|
||||
requires-dist = [{ name = "langchain-core", editable = "libs/core" }]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
]
|
||||
lint = [
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "ruff", specifier = ">=0.9.2,<1.0.0" },
|
||||
]
|
||||
test = [
|
||||
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
|
||||
{ name = "langchain-core", directory = "../core" },
|
||||
{ name = "langchain-core", editable = "libs/core" },
|
||||
{ name = "pytest", specifier = ">=8,<9" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
|
||||
@ -2862,12 +2859,16 @@ sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2
|
||||
[[package]]
|
||||
name = "langgraph"
|
||||
version = "0.2.69"
|
||||
source = { git = "https://github.com/langchain-ai/langgraph.git?subdirectory=libs%2Flanggraph#04a044374270d1c101a65b0215a4f6ac8a17b17d" }
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "langchain-core" },
|
||||
{ name = "langgraph-checkpoint" },
|
||||
{ name = "langgraph-sdk" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9b/36/d75836c6c1b4a817bb22f591137dc533be02fdba171df4d80eac49e22043/langgraph-0.2.69.tar.gz", hash = "sha256:77bd6efd967b4f092ec31d2148b3e6ba3c31e202b4f3a975dbb082b19b5bb057", size = 128593 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/4d/be/f3489ee7a67bb5bb3dc256b9e95d19a44c31e337f0345a38b443018355e3/langgraph-0.2.69-py3-none-any.whl", hash = "sha256:b64a5755fa2c7f2f67608ff4ce0ef8c168b30a0fb551a6c1d2e19bf1d2268ce4", size = 148716 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "langgraph-checkpoint"
|
||||
|
Loading…
Reference in New Issue
Block a user