docs: Update elasticsearch README (#18497)

Update the Elasticsearch README with information on how to start a deployment. Also make some cosmetic changes to the [Elasticsearch docs](https://python.langchain.com/docs/integrations/vectorstores/elasticsearch). Follow-up on https://github.com/langchain-ai/langchain/pull/17467.

Parent: 6a08134661
Commit: 81e9ab6e3a
Elasticsearch vector store docs notebook:

@@ -21,7 +21,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
 "%pip install --upgrade --quiet langchain-elasticsearch langchain-openai tiktoken langchain"
 ]
 },
 {
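The install cell above pulls in both the Elasticsearch integration and the OpenAI packages. A quick import check (illustrative only, not part of the commit) confirms the environment is ready before running the rest of the notebook:

```python
# Sanity check (illustrative): verify the packages installed by the cell above import cleanly.
from langchain_elasticsearch import ElasticsearchStore  # vector store integration
from langchain_openai import OpenAIEmbeddings           # embedding model wrapper

print(ElasticsearchStore.__module__, OpenAIEmbeddings.__module__)
```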
@@ -57,61 +57,61 @@
 "Example: Run a single-node Elasticsearch instance with security disabled. This is not recommended for production use.\n",
 "\n",
 "```bash\n",
-" docker run -p 9200:9200 -e \"discovery.type=single-node\" -e \"xpack.security.enabled=false\" -e \"xpack.security.http.ssl.enabled=false\" docker.elastic.co/elasticsearch/elasticsearch:8.9.0\n",
+"docker run -p 9200:9200 -e \"discovery.type=single-node\" -e \"xpack.security.enabled=false\" -e \"xpack.security.http.ssl.enabled=false\" docker.elastic.co/elasticsearch/elasticsearch:8.12.1\n",
 "```\n",
 "\n",
 "Once the Elasticsearch instance is running, you can connect to it using the Elasticsearch URL and index name along with the embedding object to the constructor.\n",
 "\n",
 "Example:\n",
 "```python\n",
-" from langchain_elasticsearch import ElasticsearchStore\n",
-" from langchain_openai import OpenAIEmbeddings\n",
+"from langchain_elasticsearch import ElasticsearchStore\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
-" embedding = OpenAIEmbeddings()\n",
-" elastic_vector_search = ElasticsearchStore(\n",
+"embedding = OpenAIEmbeddings()\n",
+"elastic_vector_search = ElasticsearchStore(\n",
 "    es_url=\"http://localhost:9200\",\n",
 "    index_name=\"test_index\",\n",
 "    embedding=embedding\n",
-" )\n",
+")\n",
 "```\n",
 "### Authentication\n",
 "For production, we recommend you run with security enabled. To connect with login credentials, you can use the parameters `es_api_key` or `es_user` and `es_password`.\n",
 "\n",
 "Example:\n",
 "```python\n",
-" from langchain_elasticsearch import ElasticsearchStore\n",
-" from langchain_openai import OpenAIEmbeddings\n",
+"from langchain_elasticsearch import ElasticsearchStore\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
-" embedding = OpenAIEmbeddings()\n",
-" elastic_vector_search = ElasticsearchStore(\n",
+"embedding = OpenAIEmbeddings()\n",
+"elastic_vector_search = ElasticsearchStore(\n",
 "    es_url=\"http://localhost:9200\",\n",
 "    index_name=\"test_index\",\n",
 "    embedding=embedding,\n",
 "    es_user=\"elastic\",\n",
 "    es_password=\"changeme\"\n",
-" )\n",
+")\n",
 "```\n",
 "\n",
 "You can also use an `Elasticsearch` client object that gives you more flexibility, for example to configure the maximum number of retries.\n",
 "\n",
 "Example:\n",
 "```python\n",
-" import elasticsearch\n",
-" from langchain_elasticsearch import ElasticsearchStore\n",
+"import elasticsearch\n",
+"from langchain_elasticsearch import ElasticsearchStore\n",
 "\n",
-" es_client= elasticsearch.Elasticsearch(\n",
+"es_client= elasticsearch.Elasticsearch(\n",
 "    hosts=[\"http://localhost:9200\"],\n",
 "    es_user=\"elastic\",\n",
 "    es_password=\"changeme\"\n",
 "    max_retries=10,\n",
-" )\n",
+")\n",
 "\n",
-" embedding = OpenAIEmbeddings()\n",
-" elastic_vector_search = ElasticsearchStore(\n",
+"embedding = OpenAIEmbeddings()\n",
+"elastic_vector_search = ElasticsearchStore(\n",
 "    index_name=\"test_index\",\n",
 "    es_connection=es_client,\n",
 "    embedding=embedding,\n",
-" )\n",
+")\n",
 "```\n",
 "\n",
 "#### How to obtain a password for the default \"elastic\" user?\n",
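To tie the revised notebook snippets together, here is a short end-to-end sketch (not part of the commit) that connects to a local, security-disabled instance started with the `docker run` command above and runs a query. The sample texts are invented, and `add_texts` and `similarity_search` come from LangChain's generic vector store interface rather than from this notebook.

```python
from langchain_elasticsearch import ElasticsearchStore
from langchain_openai import OpenAIEmbeddings

# Assumes a local single-node Elasticsearch started with the docker run command
# from the notebook, and OPENAI_API_KEY set in the environment.
embedding = OpenAIEmbeddings()
vector_store = ElasticsearchStore(
    es_url="http://localhost:9200",
    index_name="test_index",
    embedding=embedding,
)

# Index a couple of made-up documents, then run a similarity search against them.
vector_store.add_texts(
    [
        "Elasticsearch is a distributed search and analytics engine.",
        "LangChain provides a common vector store interface.",
    ],
    metadatas=[{"source": "demo"}, {"source": "demo"}],
)
docs = vector_store.similarity_search("What is Elasticsearch?", k=1)
print(docs[0].page_content)
```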
@@ -137,17 +137,17 @@
 "\n",
 "Example:\n",
 "```python\n",
-" from langchain_elasticsearch import ElasticsearchStore\n",
-" from langchain_openai import OpenAIEmbeddings\n",
+"from langchain_elasticsearch import ElasticsearchStore\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
-" embedding = OpenAIEmbeddings()\n",
-" elastic_vector_search = ElasticsearchStore(\n",
+"embedding = OpenAIEmbeddings()\n",
+"elastic_vector_search = ElasticsearchStore(\n",
 "    es_cloud_id=\"<cloud_id>\",\n",
 "    index_name=\"test_index\",\n",
 "    embedding=embedding,\n",
 "    es_user=\"elastic\",\n",
 "    es_password=\"changeme\"\n",
-" )\n",
+")\n",
 "```"
 ]
 },
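The Elastic Cloud example above signs in with `es_user` and `es_password`; the same constructor also accepts an API key through `es_api_key`, as noted in the notebook's Authentication section. A minimal sketch (not part of the commit) with placeholder credentials:

```python
from langchain_elasticsearch import ElasticsearchStore
from langchain_openai import OpenAIEmbeddings

# Placeholders: substitute your own deployment's Cloud ID and API key,
# obtained as described in the README setup steps that follow.
embedding = OpenAIEmbeddings()
elastic_vector_search = ElasticsearchStore(
    es_cloud_id="<cloud_id>",
    es_api_key="<api_key>",
    index_name="test_index",
    embedding=embedding,
)
```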
langchain-elasticsearch README:

@@ -8,16 +8,38 @@ This package contains the LangChain integration with Elasticsearch.
 pip install -U langchain-elasticsearch
 ```
 
-TODO document how to get id and key
+## Elasticsearch setup
 
+### Elastic Cloud
+
+You need a running Elasticsearch deployment. The easiest way to start one is through [Elastic Cloud](https://cloud.elastic.co/).
+You can sign up for a [free trial](https://www.elastic.co/cloud/cloud-trial-overview).
+
+1. [Create a deployment](https://www.elastic.co/guide/en/cloud/current/ec-create-deployment.html)
+2. Get your Cloud ID:
+    1. In the [Elastic Cloud console](https://cloud.elastic.co), click "Manage" next to your deployment
+    2. Copy the Cloud ID and paste it into the `es_cloud_id` parameter below
+3. Create an API key:
+    1. In the [Elastic Cloud console](https://cloud.elastic.co), click "Open" next to your deployment
+    2. In the left-hand side menu, go to "Stack Management", then to "API Keys"
+    3. Click "Create API key"
+    4. Enter a name for the API key and click "Create"
+    5. Copy the API key and paste it into the `es_api_key` parameter below
+
+### Elastic Cloud
+
+Alternatively, you can run Elasticsearch via Docker as described in the [docs](https://python.langchain.com/docs/integrations/vectorstores/elasticsearch).
+
 ## Usage
 
-The `ElasticsearchStore` class exposes the connection to the Pinecone vector store.
+### ElasticsearchStore
+
+The `ElasticsearchStore` class exposes Elasticsearch as a vector store.
 
 ```python
 from langchain_elasticsearch import ElasticsearchStore
 
-embeddings = ... # use a LangChain Embeddings class
+embeddings = ... # use a LangChain Embeddings class or ElasticsearchEmbeddings
 
 vectorstore = ElasticsearchStore(
     es_cloud_id="your-cloud-id",
@@ -27,3 +49,33 @@ vectorstore = ElasticsearchStore(
 )
 ```
 
+### ElasticsearchEmbeddings
+
+The `ElasticsearchEmbeddings` class provides an interface to generate embeddings using a model
+deployed in an Elasticsearch cluster.
+
+```python
+from langchain_elasticsearch import ElasticsearchEmbeddings
+
+embeddings = ElasticsearchEmbeddings.from_credentials(
+    model_id="your-model-id",
+    input_field="your-input-field",
+    es_cloud_id="your-cloud-id",
+    es_api_key="your-api-key",
+)
+```
+
+### ElasticsearchChatMessageHistory
+
+The `ElasticsearchChatMessageHistory` class stores chat histories in Elasticsearch.
+
+```python
+from langchain_elasticsearch import ElasticsearchChatMessageHistory
+
+chat_history = ElasticsearchChatMessageHistory(
+    index="your-index-name",
+    session_id="your-session-id",
+    es_cloud_id="your-cloud-id",
+    es_api_key="your-api-key",
+)
+```
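The README snippets above construct the three classes but stop before using them. Here is a brief, illustrative usage sketch (not part of the commit), assuming the placeholder credentials, model ID, and index names are replaced with real values; `add_texts`, `similarity_search`, `add_user_message`, `add_ai_message`, and `.messages` come from LangChain's standard vector store and chat history interfaces.

```python
from langchain_elasticsearch import (
    ElasticsearchChatMessageHistory,
    ElasticsearchEmbeddings,
    ElasticsearchStore,
)

# Embeddings served by a model deployed in the Elasticsearch cluster
# (placeholder model ID and input field, as in the README).
embeddings = ElasticsearchEmbeddings.from_credentials(
    model_id="your-model-id",
    input_field="your-input-field",
    es_cloud_id="your-cloud-id",
    es_api_key="your-api-key",
)

# The same embeddings object can back the vector store.
vectorstore = ElasticsearchStore(
    es_cloud_id="your-cloud-id",
    es_api_key="your-api-key",
    index_name="your-index-name",
    embedding=embeddings,
)
vectorstore.add_texts(["hello world"])
print(vectorstore.similarity_search("hello", k=1))

# Chat messages are appended per session and read back via `.messages`.
chat_history = ElasticsearchChatMessageHistory(
    index="chat-history",    # hypothetical index name
    session_id="session-1",  # hypothetical session id
    es_cloud_id="your-cloud-id",
    es_api_key="your-api-key",
)
chat_history.add_user_message("Hi there!")
chat_history.add_ai_message("Hello! How can I help?")
print(chat_history.messages)
```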
libs/partners/elasticsearch/poetry.lock (generated), 45 lines changed:
@@ -599,7 +599,7 @@ files = [
 
 [[package]]
 name = "langchain"
-version = "0.1.9"
+version = "0.1.10"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = ">=3.8.1,<4.0"
@@ -611,9 +611,10 @@ aiohttp = "^3.8.3"
 async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""}
 dataclasses-json = ">= 0.5.7, < 0.7"
 jsonpatch = "^1.33"
-langchain-community = ">=0.0.21,<0.1"
-langchain-core = ">=0.1.26,<0.2"
-langsmith = "^0.1.0"
+langchain-community = ">=0.0.25,<0.1"
+langchain-core = ">=0.1.28,<0.2"
+langchain-text-splitters = ">=0.0.1,<0.1"
+langsmith = "^0.1.14"
 numpy = "^1"
 pydantic = ">=1,<3"
 PyYAML = ">=5.3"
@@ -623,7 +624,7 @@ tenacity = "^8.1.0"
 
 [package.extras]
 all = []
-azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"]
+azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"]
 clarifai = ["clarifai (>=9.1.0)"]
 cli = ["typer (>=0.9.0,<0.10.0)"]
 cohere = ["cohere (>=4,<5)"]
@@ -642,7 +643,7 @@ url = "../../langchain"
 
 [[package]]
 name = "langchain-community"
-version = "0.0.24"
+version = "0.0.25"
 description = "Community contributed LangChain integrations."
 optional = false
 python-versions = ">=3.8.1,<4.0"
@@ -652,7 +653,7 @@ develop = true
 [package.dependencies]
 aiohttp = "^3.8.3"
 dataclasses-json = ">= 0.5.7, < 0.7"
-langchain-core = ">=0.1.26,<0.2"
+langchain-core = "^0.1.28"
 langsmith = "^0.1.0"
 numpy = "^1"
 PyYAML = ">=5.3"
@@ -670,7 +671,7 @@ url = "../../community"
 
 [[package]]
 name = "langchain-core"
-version = "0.1.26"
+version = "0.1.28"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = ">=3.8.1,<4.0"
@@ -694,15 +695,34 @@ extended-testing = ["jinja2 (>=3,<4)"]
 type = "directory"
 url = "../../core"
 
+[[package]]
+name = "langchain-text-splitters"
+version = "0.0.1"
+description = "LangChain text splitting utilities"
+optional = false
+python-versions = ">=3.8.1,<4.0"
+files = []
+develop = true
+
+[package.dependencies]
+langchain-core = "^0.1.28"
+
+[package.extras]
+extended-testing = ["lxml (>=5.1.0,<6.0.0)"]
+
+[package.source]
+type = "directory"
+url = "../../text-splitters"
+
 [[package]]
 name = "langsmith"
-version = "0.1.9"
+version = "0.1.14"
 description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
 optional = false
 python-versions = ">=3.8.1,<4.0"
 files = [
-    {file = "langsmith-0.1.9-py3-none-any.whl", hash = "sha256:f821b3cb07a87eac5cb2181ff0b61051811e4eef09ae4b46e700981f7ae5dfb9"},
-    {file = "langsmith-0.1.9.tar.gz", hash = "sha256:9bd3e80607722c3d2db84cf3440005491a859b80b5e499bc988032d5c2da91f0"},
+    {file = "langsmith-0.1.14-py3-none-any.whl", hash = "sha256:ecb243057d2a43c2da0524fe395585bc3421bb5d24f1cdd53eb06fbe63e43a69"},
+    {file = "langsmith-0.1.14.tar.gz", hash = "sha256:b95f267d25681f4c9862bb68236fba8a57a60ec7921ecfdaa125936807e51bde"},
 ]
 
 [package.dependencies]
@@ -1232,7 +1252,6 @@ files = [
     {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
-    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
     {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
     {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
     {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -1652,4 +1671,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8.1,<4.0"
-content-hash = "2d6d655d583bada95d7c62a5ef37080eff99819e7ca02c9c75f73de3e59e0aff"
+content-hash = "7521bf553cf55c1883bcf93e79103bc6136ccba901bb8277af24f1539d8c8bce"
libs/partners/elasticsearch/pyproject.toml:

@@ -29,6 +29,7 @@ pytest-asyncio = "^0.21.1"
 langchain = { path = "../../langchain", develop = true }
 langchain-community = { path = "../../community", develop = true }
 langchain-core = { path = "../../core", develop = true }
+langchain-text-splitters = {path = "../../text-splitters", develop = true}
 
 [tool.poetry.group.codespell]
 optional = true