Mirror of https://github.com/hwchase17/langchain.git (synced 2026-02-04 08:10:25 +00:00)
Compare commits: 154 commits
Commit SHA1s:

89c6cb6f8b, aa9f247803, fce9322d2e, e2c8690e7f, 287911adbc, fc08f240ee, bee8994b7e, c62772885a, 374bb40852, c43c62b6c9,
bb83f1b875, ed412d89b4, 789f4b1c9c, 13ba15b2cc, b904763115, cecdc119bf, 7975c1f0ca, 622cb7d2cf, 3a0c7c705c, 162d3ff54b,
301be2d40a, 6df9178056, 6208773c77, e24259fee7, 9132516c84, f2f9187919, 438301db90, 7842e2c460, 949127fbc1, baad44965e,
d588ce1f29, 2070d659a0, 6c8d626d70, 109ba548bd, 0f4a087186, 71268f7a15, b8fc82b84b, 179eeead81, 7a57b4fbbf, d9ba65ca26,
0319ccd273, 6e2a72c218, 9f482f4284, 15466d89a2, 61087b0c0d, b2ba4f4072, b2c8f2de4c, 6df9360e32, b664b3364c, bccc546a25,
6405e7fa07, ae24f7364d, 81f8c2f33d, c27703a10f, 1b77063c88, b74546a458, 8a3a9c8968, 776d01db49, 40b43b0bfb, 6fd4ac4283,
f4e7cb394f, 1ecaffab8a, 5bbd5364f1, e02b093d81, 0cc6584889, 6e1b0d0228, a111098230, 9e7222618b, 8516a03a02, 1ad66e70dc,
76564edd3a, 1c51e1693d, a267da6a3a, 8da2ace99d, e358846b39, 3c598d25a6, e5aa0f938b, 79c46319dd, c5d4dfefc0, 6e853501ec,
fd1f3ca213, 567a4ce5aa, 923ce84aa7, 9379613132, c72a76237f, f9cafcbcb0, 1fce5543bc, 88e9e6bf55, 7f0dd4b182, 5557b86a54,
caf4ae3a45, c88b75ca6a, e409a85a28, 40634d441a, 1d2a503ab8, b924c61440, efa10c8ef8, 0a6c67ce6a, ed771f2d2b, 63ba12d8e0,
f785cf029b, be7cd0756f, 51c6899850, 163d6fe8ef, 7cee7fbfad, 4799ad95d0, 88065d794b, b27bfa6717, 5adeaf0732, f9d91e19c5,
4c7afb0d6c, c1ff61669d, 54d6808c1e, 78468de2e5, 76572f963b, c0448f27ba, 179aaa4007, d072d592a1, 78c454c130, 5199555c0d,
5e31cd91a7, 49a1f5dd47, d0cc9b022a, a91bd2737a, 5ad2b8ce80, b78764599b, 2888e34f53, dd4418a503, a976f2071b, 5f98975be0,
0529c991ce, 954abcce59, 6ad515d34e, 99348e1614, 2c742cc20d, 02f87203f7, 56163481dd, 6aac2eeab5, 559d8a4d13, ec9e8eb71c,
9399df7777, 5fc1104d00, 6777106fbe, 5f5287c3b0, 615f8b0d47, 9a9ab65030, 241b6d2355, 91e09ffee5, 8e4bae351e, 0da201c1d5,
29413a22e1, ae5a574aa5, 5a0e82c31c, 8590b421c4
.github/scripts/check_diff.py (vendored): 11 lines changed
@@ -16,6 +16,10 @@ LANGCHAIN_DIRS = [
     "libs/experimental",
 ]
 
+# for 0.3rc, we are ignoring core dependents
+# in order to be able to get CI to pass for individual PRs.
+IGNORE_CORE_DEPENDENTS = True
+
 # ignored partners are removed from dependents
 # but still run if directly edited
 IGNORED_PARTNERS = [
@@ -102,9 +106,9 @@ def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]:
     if dir_ == "libs/core":
         return [
             {"working-directory": dir_, "python-version": f"3.{v}"}
-            for v in range(8, 13)
+            for v in range(9, 13)
         ]
-    min_python = "3.8"
+    min_python = "3.9"
     max_python = "3.12"
 
     # custom logic for specific directories
@@ -184,6 +188,9 @@ if __name__ == "__main__":
         # for extended testing
         found = False
         for dir_ in LANGCHAIN_DIRS:
+            if dir_ == "libs/core" and IGNORE_CORE_DEPENDENTS:
+                dirs_to_run["extended-test"].add(dir_)
+                continue
             if file.startswith(dir_):
                 found = True
         if found:

@@ -11,7 +11,7 @@ if __name__ == "__main__":
 
     # see if we're releasing an rc
     version = toml_data["tool"]["poetry"]["version"]
-    releasing_rc = "rc" in version
+    releasing_rc = "rc" in version or "dev" in version
 
     # if not, iterate through dependencies and make sure none allow prereleases
     if not releasing_rc:
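For reference, a minimal sketch of what the updated `libs/core` branch of `_get_configs_for_single_dir` now produces (the values come from the hunk above; the surrounding function and its callers are not reproduced here):

```python
# Sketch only: the libs/core matrix entries as generated after this change.
# Python 3.8 is dropped for the 0.3 release line.
dir_ = "libs/core"
configs = [
    {"working-directory": dir_, "python-version": f"3.{v}"}
    for v in range(9, 13)
]
print(configs)
# [{'working-directory': 'libs/core', 'python-version': '3.9'},
#  {'working-directory': 'libs/core', 'python-version': '3.10'},
#  {'working-directory': 'libs/core', 'python-version': '3.11'},
#  {'working-directory': 'libs/core', 'python-version': '3.12'}]
```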
.github/scripts/get_min_versions.py (vendored): 1 line changed
@@ -15,6 +15,7 @@ MIN_VERSION_LIBS = [
     "langchain",
     "langchain-text-splitters",
     "SQLAlchemy",
+    "pydantic",
 ]
 
 SKIP_IF_PULL_REQUEST = ["langchain-core"]
.github/workflows/_dependencies.yml (vendored): 114 lines changed (file removed)
@@ -1,114 +0,0 @@
name: dependencies

on:
  workflow_call:
    inputs:
      working-directory:
        required: true
        type: string
        description: "From which folder this pipeline executes"
      langchain-location:
        required: false
        type: string
        description: "Relative path to the langchain library folder"
      python-version:
        required: true
        type: string
        description: "Python version to use"

env:
  POETRY_VERSION: "1.7.1"

jobs:
  build:
    defaults:
      run:
        working-directory: ${{ inputs.working-directory }}
    runs-on: ubuntu-latest
    name: dependency checks ${{ inputs.python-version }}
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
        uses: "./.github/actions/poetry_setup"
        with:
          python-version: ${{ inputs.python-version }}
          poetry-version: ${{ env.POETRY_VERSION }}
          working-directory: ${{ inputs.working-directory }}
          cache-key: pydantic-cross-compat

      - name: Install dependencies
        shell: bash
        run: poetry install

      - name: Check imports with base dependencies
        shell: bash
        run: poetry run make check_imports

      - name: Install test dependencies
        shell: bash
        run: poetry install --with test

      - name: Install langchain editable
        working-directory: ${{ inputs.working-directory }}
        if: ${{ inputs.langchain-location }}
        env:
          LANGCHAIN_LOCATION: ${{ inputs.langchain-location }}
        run: |
          poetry run pip install -e "$LANGCHAIN_LOCATION"

      - name: Install the opposite major version of pydantic
        # If normal tests use pydantic v1, here we'll use v2, and vice versa.
        shell: bash
        # airbyte currently doesn't support pydantic v2
        if: ${{ !startsWith(inputs.working-directory, 'libs/partners/airbyte') }}
        run: |
          # Determine the major part of pydantic version
          REGULAR_VERSION=$(poetry run python -c "import pydantic; print(pydantic.__version__)" | cut -d. -f1)

          if [[ "$REGULAR_VERSION" == "1" ]]; then
            PYDANTIC_DEP=">=2.1,<3"
            TEST_WITH_VERSION="2"
          elif [[ "$REGULAR_VERSION" == "2" ]]; then
            PYDANTIC_DEP="<2"
            TEST_WITH_VERSION="1"
          else
            echo "Unexpected pydantic major version '$REGULAR_VERSION', cannot determine which version to use for cross-compatibility test."
            exit 1
          fi

          # Install via `pip` instead of `poetry add` to avoid changing lockfile,
          # which would prevent caching from working: the cache would get saved
          # to a different key than where it gets loaded from.
          poetry run pip install "pydantic${PYDANTIC_DEP}"

          # Ensure that the correct pydantic is installed now.
          echo "Checking pydantic version... Expecting ${TEST_WITH_VERSION}"

          # Determine the major part of pydantic version
          CURRENT_VERSION=$(poetry run python -c "import pydantic; print(pydantic.__version__)" | cut -d. -f1)

          # Check that the major part of pydantic version is as expected, if not
          # raise an error
          if [[ "$CURRENT_VERSION" != "$TEST_WITH_VERSION" ]]; then
            echo "Error: expected pydantic version ${CURRENT_VERSION} to have been installed, but found: ${TEST_WITH_VERSION}"
            exit 1
          fi
          echo "Found pydantic version ${CURRENT_VERSION}, as expected"

      - name: Run pydantic compatibility tests
        # airbyte currently doesn't support pydantic v2
        if: ${{ !startsWith(inputs.working-directory, 'libs/partners/airbyte') }}
        shell: bash
        run: make test

      - name: Ensure the tests did not create any additional files
        shell: bash
        run: |
          set -eu

          STATUS="$(git status)"
          echo "$STATUS"

          # grep will exit non-zero if the target message isn't found,
          # and `set -e` above will cause the step to fail.
          echo "$STATUS" | grep 'nothing to commit, working tree clean'
.github/workflows/check_diffs.yml (vendored): 20 lines changed
@@ -46,6 +46,7 @@ jobs:
     strategy:
       matrix:
         job-configs: ${{ fromJson(needs.build.outputs.lint) }}
+      fail-fast: false
     uses: ./.github/workflows/_lint.yml
     with:
       working-directory: ${{ matrix.job-configs.working-directory }}
@@ -59,6 +60,7 @@ jobs:
     strategy:
       matrix:
         job-configs: ${{ fromJson(needs.build.outputs.test) }}
+      fail-fast: false
     uses: ./.github/workflows/_test.yml
     with:
       working-directory: ${{ matrix.job-configs.working-directory }}
@@ -71,6 +73,7 @@ jobs:
     strategy:
       matrix:
         job-configs: ${{ fromJson(needs.build.outputs.test-doc-imports) }}
+      fail-fast: false
     uses: ./.github/workflows/_test_doc_imports.yml
     secrets: inherit
     with:
@@ -83,25 +86,13 @@ jobs:
     strategy:
       matrix:
         job-configs: ${{ fromJson(needs.build.outputs.compile-integration-tests) }}
+      fail-fast: false
     uses: ./.github/workflows/_compile_integration_test.yml
     with:
       working-directory: ${{ matrix.job-configs.working-directory }}
       python-version: ${{ matrix.job-configs.python-version }}
     secrets: inherit
 
-  dependencies:
-    name: cd ${{ matrix.job-configs.working-directory }}
-    needs: [ build ]
-    if: ${{ needs.build.outputs.dependencies != '[]' }}
-    strategy:
-      matrix:
-        job-configs: ${{ fromJson(needs.build.outputs.dependencies) }}
-    uses: ./.github/workflows/_dependencies.yml
-    with:
-      working-directory: ${{ matrix.job-configs.working-directory }}
-      python-version: ${{ matrix.job-configs.python-version }}
-    secrets: inherit
-
   extended-tests:
     name: "cd ${{ matrix.job-configs.working-directory }} / make extended_tests #${{ matrix.job-configs.python-version }}"
     needs: [ build ]
@@ -110,6 +101,7 @@ jobs:
       matrix:
         # note different variable for extended test dirs
         job-configs: ${{ fromJson(needs.build.outputs.extended-tests) }}
+      fail-fast: false
     runs-on: ubuntu-latest
     defaults:
       run:
@@ -149,7 +141,7 @@ jobs:
           echo "$STATUS" | grep 'nothing to commit, working tree clean'
   ci_success:
     name: "CI Success"
-    needs: [build, lint, test, compile-integration-tests, dependencies, extended-tests, test-doc-imports]
+    needs: [build, lint, test, compile-integration-tests, extended-tests, test-doc-imports]
     if: |
       always()
     runs-on: ubuntu-latest
.github/workflows/scheduled_test.yml (vendored): 2 lines changed
@@ -17,7 +17,7 @@ jobs:
       fail-fast: false
       matrix:
         python-version:
-          - "3.8"
+          - "3.9"
           - "3.11"
         working-directory:
           - "libs/partners/openai"
Makefile: 1 line changed
@@ -36,7 +36,6 @@ api_docs_build:
 API_PKG ?= text-splitters
 
 api_docs_quick_preview:
-	poetry run pip install "pydantic<2"
 	poetry run python docs/api_reference/create_api_rst.py $(API_PKG)
 	cd docs/api_reference && poetry run make html
 	poetry run python docs/api_reference/scripts/custom_formatter.py docs/api_reference/_build/html/
@@ -17,7 +17,10 @@ def process_toc_h3_elements(html_content: str) -> str:
 
     # Process each element
     for element in toc_h3_elements:
-        element = element.a.code.span
+        try:
+            element = element.a.code.span
+        except Exception:
+            continue
         # Get the text content of the element
         content = element.get_text()
 
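For orientation, a minimal sketch of how the guarded lookup introduced above fits into a TOC-processing function. Only the try/except mirrors the hunk; the function body, selector, and return value are assumptions made for illustration, not the script's actual implementation:

```python
from bs4 import BeautifulSoup


def process_toc_h3_elements(html_content: str) -> str:
    """Illustrative sketch: walk in-page TOC h3 entries and read their labels."""
    soup = BeautifulSoup(html_content, "html.parser")
    # Selector is an assumption; the real script targets the page's TOC markup.
    toc_h3_elements = soup.select("nav h3")

    for element in toc_h3_elements:
        # Some entries lack the <a><code><span> chain, so the attribute walk
        # is wrapped in try/except (this is the change shown in the hunk).
        try:
            element = element.a.code.span
        except Exception:
            continue
        content = element.get_text()
        # ... further formatting of `content` happens in the real script ...
    return str(soup)
```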
@@ -15,7 +15,7 @@
     :member-order: groupwise
     :show-inheritance: True
     :special-members: __call__
-    :exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace
+    :exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, model_construct, model_copy, model_dump, model_dump_json, model_parametrized_name, model_post_init, model_rebuild, model_validate, model_validate_json, model_validate_strings, model_extra, model_fields_set, model_json_schema
 
 
 {% block attributes %}

@@ -15,7 +15,7 @@
     :member-order: groupwise
     :show-inheritance: True
     :special-members: __call__
-    :exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, astream_log, transform, atransform, get_output_schema, get_prompts, config_schema, map, pick, pipe, with_listeners, with_alisteners, with_config, with_fallbacks, with_types, with_retry, InputType, OutputType, config_specs, output_schema, get_input_schema, get_graph, get_name, input_schema, name, bind, assign, as_tool
+    :exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, astream_log, transform, atransform, get_output_schema, get_prompts, config_schema, map, pick, pipe, with_listeners, with_alisteners, with_config, with_fallbacks, with_types, with_retry, InputType, OutputType, config_specs, output_schema, get_input_schema, get_graph, get_name, input_schema, name, bind, assign, as_tool, get_config_jsonschema, get_input_jsonschema, get_output_jsonschema, model_construct, model_copy, model_dump, model_dump_json, model_parametrized_name, model_post_init, model_rebuild, model_validate, model_validate_json, model_validate_strings, to_json, model_extra, model_fields_set, model_json_schema
 
 .. NOTE:: {{objname}} implements the standard :py:class:`Runnable Interface <langchain_core.runnables.base.Runnable>`. 🏃
@@ -945,7 +945,7 @@ Here's an example:
 
 ```python
 from typing import Optional
 
-from langchain_core.pydantic_v1 import BaseModel, Field
+from pydantic import BaseModel, Field
 
 
 class Joke(BaseModel):
@@ -1062,7 +1062,7 @@ a `tool_calls` field containing `args` that match the desired shape.
 
 There are several acceptable formats you can use to bind tools to a model in LangChain. Here's one example:
 
 ```python
-from langchain_core.pydantic_v1 import BaseModel, Field
+from pydantic import BaseModel, Field
 from langchain_openai import ChatOpenAI
 
 
 class ResponseFormatter(BaseModel):
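For context, a minimal usage sketch of the import style these docs hunks move to (plain `pydantic` instead of the `langchain_core.pydantic_v1` shim). It is not taken from the diff: the model name and field descriptions are illustrative, and it assumes `langchain-openai` is installed with `OPENAI_API_KEY` set:

```python
from langchain_openai import ChatOpenAI
from pydantic import BaseModel, Field  # pydantic v2 import, as in the updated docs


class Joke(BaseModel):
    """Joke to tell the user."""

    setup: str = Field(description="The setup of the joke")
    punchline: str = Field(description="The punchline of the joke")


llm = ChatOpenAI(model="gpt-4o")
structured_llm = llm.with_structured_output(Joke)
# structured_llm.invoke("Tell me a joke about cats") returns a Joke instance
# (left commented out here so the sketch does not require an API call).
```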
@@ -18,8 +18,23 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "994d6c74",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:08:00.190093Z",
|
||||
"iopub.status.busy": "2024-09-10T20:08:00.189665Z",
|
||||
"iopub.status.idle": "2024-09-10T20:08:05.438015Z",
|
||||
"shell.execute_reply": "2024-09-10T20:08:05.437685Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"USER_AGENT environment variable not set, consider setting it to identify your requests.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Build a sample vectorDB\n",
|
||||
"from langchain_chroma import Chroma\n",
|
||||
@@ -54,7 +69,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "edbca101",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:08:05.439930Z",
|
||||
"iopub.status.busy": "2024-09-10T20:08:05.439810Z",
|
||||
"iopub.status.idle": "2024-09-10T20:08:05.553766Z",
|
||||
"shell.execute_reply": "2024-09-10T20:08:05.553520Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.retrievers.multi_query import MultiQueryRetriever\n",
|
||||
@@ -71,7 +93,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "9e6d3b69",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:08:05.555359Z",
|
||||
"iopub.status.busy": "2024-09-10T20:08:05.555262Z",
|
||||
"iopub.status.idle": "2024-09-10T20:08:05.557046Z",
|
||||
"shell.execute_reply": "2024-09-10T20:08:05.556825Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Set logging for the queries\n",
|
||||
@@ -85,13 +114,20 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "bc93dc2b-9407-48b0-9f9a-338247e7eb69",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:08:05.558176Z",
|
||||
"iopub.status.busy": "2024-09-10T20:08:05.558100Z",
|
||||
"iopub.status.idle": "2024-09-10T20:08:07.250342Z",
|
||||
"shell.execute_reply": "2024-09-10T20:08:07.249711Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"INFO:langchain.retrievers.multi_query:Generated queries: ['1. How can Task Decomposition be achieved through different methods?', '2. What strategies are commonly used for Task Decomposition?', '3. What are the various techniques for breaking down tasks in Task Decomposition?']\n"
|
||||
"INFO:langchain.retrievers.multi_query:Generated queries: ['1. How can Task Decomposition be achieved through different methods?', '2. What strategies are commonly used for Task Decomposition?', '3. What are the various ways to break down tasks in Task Decomposition?']\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -137,14 +173,21 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "d9afb0ca",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:08:07.253875Z",
|
||||
"iopub.status.busy": "2024-09-10T20:08:07.253600Z",
|
||||
"iopub.status.idle": "2024-09-10T20:08:07.277848Z",
|
||||
"shell.execute_reply": "2024-09-10T20:08:07.277487Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from typing import List\n",
|
||||
"\n",
|
||||
"from langchain_core.output_parsers import BaseOutputParser\n",
|
||||
"from langchain_core.prompts import PromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Output parser will split the LLM result into a list of queries\n",
|
||||
@@ -180,13 +223,20 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "59c75c56-dbd7-4887-b9ba-0b5b21069f51",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:08:07.280001Z",
|
||||
"iopub.status.busy": "2024-09-10T20:08:07.279861Z",
|
||||
"iopub.status.idle": "2024-09-10T20:08:09.579525Z",
|
||||
"shell.execute_reply": "2024-09-10T20:08:09.578837Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"INFO:langchain.retrievers.multi_query:Generated queries: ['1. Can you provide insights on regression from the course material?', '2. How is regression discussed in the course content?', '3. What information does the course offer about regression?', '4. In what way is regression covered in the course?', '5. What are the teachings of the course regarding regression?']\n"
|
||||
"INFO:langchain.retrievers.multi_query:Generated queries: ['1. Can you provide insights on regression from the course material?', '2. How is regression discussed in the course content?', '3. What information does the course offer regarding regression?', '4. In what way is regression covered in the course?', \"5. What are the course's teachings on regression?\"]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -228,7 +278,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.4"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -26,10 +26,32 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 1,
|
||||
"id": "165b0de6-9ae3-4e3d-aa98-4fc8a97c4a06",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:32.858670Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:32.858278Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:33.009452Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:33.007022Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"zsh:1: 0.2.8 not found\r\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Note: you may need to restart the kernel to use updated packages.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"%pip install -qU langchain>=0.2.8 langchain-openai langchain-anthropic langchain-google-vertexai"
|
||||
]
|
||||
@@ -44,19 +66,48 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"execution_count": 2,
|
||||
"id": "79e14913-803c-4382-9009-5c6af3d75d35",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:33.015729Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:33.015241Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:39.391716Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:39.390438Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"/var/folders/4j/2rz3865x6qg07tx43146py8h0000gn/T/ipykernel_95293/571506279.py:4: LangChainBetaWarning: The function `init_chat_model` is in beta. It is actively being worked on, so the API may change.\n",
|
||||
" gpt_4o = init_chat_model(\"gpt-4o\", model_provider=\"openai\", temperature=0)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"GPT-4o: I'm an AI created by OpenAI, and I don't have a personal name. How can I assist you today?\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"GPT-4o: I'm an AI created by OpenAI, and I don't have a personal name. You can call me Assistant! How can I help you today?\n",
|
||||
"\n",
|
||||
"Claude Opus: My name is Claude. It's nice to meet you!\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Gemini 1.5: I am a large language model, trained by Google. \n",
|
||||
"\n",
|
||||
"Gemini 1.5: I am a large language model, trained by Google. I do not have a name. \n",
|
||||
"I don't have a name like a person does. You can call me Bard if you like! 😊 \n",
|
||||
"\n",
|
||||
"\n"
|
||||
]
|
||||
@@ -94,9 +145,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"execution_count": 3,
|
||||
"id": "0378ccc6-95bc-4d50-be50-fccc193f0a71",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:39.396908Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:39.396563Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:39.444959Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:39.444646Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"gpt_4o = init_chat_model(\"gpt-4o\", temperature=0)\n",
|
||||
@@ -116,17 +174,24 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"execution_count": 4,
|
||||
"id": "6c037f27-12d7-4e83-811e-4245c0e3ba58",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:39.446901Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:39.446773Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:40.301906Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:40.300918Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"AIMessage(content=\"I'm an AI language model created by OpenAI, and I don't have a personal name. You can call me Assistant or any other name you prefer! How can I assist you today?\", response_metadata={'token_usage': {'completion_tokens': 37, 'prompt_tokens': 11, 'total_tokens': 48}, 'model_name': 'gpt-4o-2024-05-13', 'system_fingerprint': 'fp_d576307f90', 'finish_reason': 'stop', 'logprobs': None}, id='run-5428ab5c-b5c0-46de-9946-5d4ca40dbdc8-0', usage_metadata={'input_tokens': 11, 'output_tokens': 37, 'total_tokens': 48})"
|
||||
"AIMessage(content=\"I'm an AI created by OpenAI, and I don't have a personal name. How can I assist you today?\", additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 23, 'prompt_tokens': 11, 'total_tokens': 34}, 'model_name': 'gpt-4o-2024-05-13', 'system_fingerprint': 'fp_25624ae3a5', 'finish_reason': 'stop', 'logprobs': None}, id='run-b41df187-4627-490d-af3c-1c96282d3eb0-0', usage_metadata={'input_tokens': 11, 'output_tokens': 23, 'total_tokens': 34})"
|
||||
]
|
||||
},
|
||||
"execution_count": 5,
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -141,17 +206,24 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": 5,
|
||||
"id": "321e3036-abd2-4e1f-bcc6-606efd036954",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:40.316030Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:40.315628Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:41.199134Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:41.198173Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"AIMessage(content=\"My name is Claude. It's nice to meet you!\", response_metadata={'id': 'msg_012XvotUJ3kGLXJUWKBVxJUi', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 11, 'output_tokens': 15}}, id='run-1ad1eefe-f1c6-4244-8bc6-90e2cb7ee554-0', usage_metadata={'input_tokens': 11, 'output_tokens': 15, 'total_tokens': 26})"
|
||||
"AIMessage(content=\"My name is Claude. It's nice to meet you!\", additional_kwargs={}, response_metadata={'id': 'msg_01Fx9P74A7syoFkwE73CdMMY', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 11, 'output_tokens': 15}}, id='run-a0fd2bbd-3b7e-46bf-8d69-a48c7e60b03c-0', usage_metadata={'input_tokens': 11, 'output_tokens': 15, 'total_tokens': 26})"
|
||||
]
|
||||
},
|
||||
"execution_count": 6,
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -174,17 +246,24 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"execution_count": 6,
|
||||
"id": "814a2289-d0db-401e-b555-d5116112b413",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:41.203346Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:41.203004Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:41.891450Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:41.890539Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"AIMessage(content=\"I'm an AI language model created by OpenAI, and I don't have a personal name. You can call me Assistant or any other name you prefer! How can I assist you today?\", response_metadata={'token_usage': {'completion_tokens': 37, 'prompt_tokens': 11, 'total_tokens': 48}, 'model_name': 'gpt-4o-2024-05-13', 'system_fingerprint': 'fp_ce0793330f', 'finish_reason': 'stop', 'logprobs': None}, id='run-3923e328-7715-4cd6-b215-98e4b6bf7c9d-0', usage_metadata={'input_tokens': 11, 'output_tokens': 37, 'total_tokens': 48})"
|
||||
"AIMessage(content=\"I'm an AI created by OpenAI, and I don't have a personal name. How can I assist you today?\", additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 23, 'prompt_tokens': 11, 'total_tokens': 34}, 'model_name': 'gpt-4o-2024-05-13', 'system_fingerprint': 'fp_25624ae3a5', 'finish_reason': 'stop', 'logprobs': None}, id='run-3380f977-4b89-4f44-bc02-b64043b3166f-0', usage_metadata={'input_tokens': 11, 'output_tokens': 23, 'total_tokens': 34})"
|
||||
]
|
||||
},
|
||||
"execution_count": 9,
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -202,17 +281,24 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"execution_count": 7,
|
||||
"id": "6c8755ba-c001-4f5a-a497-be3f1db83244",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:41.896413Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:41.895967Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:42.767565Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:42.766619Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"AIMessage(content=\"My name is Claude. It's nice to meet you!\", response_metadata={'id': 'msg_01RyYR64DoMPNCfHeNnroMXm', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 11, 'output_tokens': 15}}, id='run-22446159-3723-43e6-88df-b84797e7751d-0', usage_metadata={'input_tokens': 11, 'output_tokens': 15, 'total_tokens': 26})"
|
||||
"AIMessage(content=\"My name is Claude. It's nice to meet you!\", additional_kwargs={}, response_metadata={'id': 'msg_01EFKSWpmsn2PSYPQa4cNHWb', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 11, 'output_tokens': 15}}, id='run-3c58f47c-41b9-4e56-92e7-fb9602e3787c-0', usage_metadata={'input_tokens': 11, 'output_tokens': 15, 'total_tokens': 26})"
|
||||
]
|
||||
},
|
||||
"execution_count": 10,
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -242,28 +328,37 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"execution_count": 8,
|
||||
"id": "067dabee-1050-4110-ae24-c48eba01e13b",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:42.771941Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:42.771606Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:43.909206Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:43.908496Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"[{'name': 'GetPopulation',\n",
|
||||
" 'args': {'location': 'Los Angeles, CA'},\n",
|
||||
" 'id': 'call_sYT3PFMufHGWJD32Hi2CTNUP'},\n",
|
||||
" 'id': 'call_Ga9m8FAArIyEjItHmztPYA22',\n",
|
||||
" 'type': 'tool_call'},\n",
|
||||
" {'name': 'GetPopulation',\n",
|
||||
" 'args': {'location': 'New York, NY'},\n",
|
||||
" 'id': 'call_j1qjhxRnD3ffQmRyqjlI1Lnk'}]"
|
||||
" 'id': 'call_jh2dEvBaAHRaw5JUDthOs7rt',\n",
|
||||
" 'type': 'tool_call'}]"
|
||||
]
|
||||
},
|
||||
"execution_count": 7,
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class GetWeather(BaseModel):\n",
|
||||
@@ -288,22 +383,31 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"execution_count": 9,
|
||||
"id": "e57dfe9f-cd24-4e37-9ce9-ccf8daf78f89",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:43.912746Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:43.912447Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:46.437049Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:46.436093Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"[{'name': 'GetPopulation',\n",
|
||||
" 'args': {'location': 'Los Angeles, CA'},\n",
|
||||
" 'id': 'toolu_01CxEHxKtVbLBrvzFS7GQ5xR'},\n",
|
||||
" 'id': 'toolu_01JMufPf4F4t2zLj7miFeqXp',\n",
|
||||
" 'type': 'tool_call'},\n",
|
||||
" {'name': 'GetPopulation',\n",
|
||||
" 'args': {'location': 'New York City, NY'},\n",
|
||||
" 'id': 'toolu_013A79qt5toWSsKunFBDZd5S'}]"
|
||||
" 'id': 'toolu_01RQBHcE8kEEbYTuuS8WqY1u',\n",
|
||||
" 'type': 'tool_call'}]"
|
||||
]
|
||||
},
|
||||
"execution_count": 8,
|
||||
"execution_count": 9,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
|
||||
@@ -289,7 +289,7 @@
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Joke(BaseModel):\n",
|
||||
|
||||
@@ -190,7 +190,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class GSchema(BaseModel):\n",
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"|---------------|---------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|\n",
|
||||
"| name | str | Must be unique within a set of tools provided to an LLM or agent. |\n",
|
||||
"| description | str | Describes what the tool does. Used as context by the LLM or agent. |\n",
|
||||
"| args_schema | langchain.pydantic_v1.BaseModel | Optional but recommended, and required if using callback handlers. It can be used to provide more information (e.g., few-shot examples) or validation for expected parameters. |\n",
|
||||
"| args_schema | pydantic.BaseModel | Optional but recommended, and required if using callback handlers. It can be used to provide more information (e.g., few-shot examples) or validation for expected parameters. |\n",
|
||||
"| return_direct | boolean | Only relevant for agents. When True, after invoking the given tool, the agent will stop and return the result direcly to the user. |\n",
|
||||
"\n",
|
||||
"LangChain supports the creation of tools from:\n",
|
||||
@@ -48,7 +48,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "cc7005cd-072f-4d37-8453-6297468e5192",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:52.645451Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:52.645081Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.030958Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.030669Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@@ -88,7 +95,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "0c0991db-b997-4611-be37-4346e660506b",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.032544Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.032420Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.035349Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.035123Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.tools import tool\n",
|
||||
@@ -112,22 +126,29 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "5626423f-053e-4a66-adca-1d794d835397",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.036658Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.036574Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.041154Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.040964Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'title': 'multiply_by_maxSchema',\n",
|
||||
" 'description': 'Multiply a by the maximum of b.',\n",
|
||||
" 'type': 'object',\n",
|
||||
" 'properties': {'a': {'title': 'A',\n",
|
||||
" 'description': 'scale factor',\n",
|
||||
"{'description': 'Multiply a by the maximum of b.',\n",
|
||||
" 'properties': {'a': {'description': 'scale factor',\n",
|
||||
" 'title': 'A',\n",
|
||||
" 'type': 'string'},\n",
|
||||
" 'b': {'title': 'B',\n",
|
||||
" 'description': 'list of ints over which to take maximum',\n",
|
||||
" 'type': 'array',\n",
|
||||
" 'items': {'type': 'integer'}}},\n",
|
||||
" 'required': ['a', 'b']}"
|
||||
" 'b': {'description': 'list of ints over which to take maximum',\n",
|
||||
" 'items': {'type': 'integer'},\n",
|
||||
" 'title': 'B',\n",
|
||||
" 'type': 'array'}},\n",
|
||||
" 'required': ['a', 'b'],\n",
|
||||
" 'title': 'multiply_by_maxSchema',\n",
|
||||
" 'type': 'object'}"
|
||||
]
|
||||
},
|
||||
"execution_count": 3,
|
||||
@@ -163,7 +184,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "9216d03a-f6ea-4216-b7e1-0661823a4c0b",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.042516Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.042427Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.045217Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.045010Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@@ -171,13 +199,13 @@
|
||||
"text": [
|
||||
"multiplication-tool\n",
|
||||
"Multiply two numbers.\n",
|
||||
"{'a': {'title': 'A', 'description': 'first number', 'type': 'integer'}, 'b': {'title': 'B', 'description': 'second number', 'type': 'integer'}}\n",
|
||||
"{'a': {'description': 'first number', 'title': 'A', 'type': 'integer'}, 'b': {'description': 'second number', 'title': 'B', 'type': 'integer'}}\n",
|
||||
"True\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from langchain.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class CalculatorInput(BaseModel):\n",
|
||||
@@ -218,19 +246,26 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "336f5538-956e-47d5-9bde-b732559f9e61",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.046526Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.046456Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.050045Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.049836Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'title': 'fooSchema',\n",
|
||||
" 'description': 'The foo.',\n",
|
||||
" 'type': 'object',\n",
|
||||
" 'properties': {'bar': {'title': 'Bar',\n",
|
||||
" 'description': 'The bar.',\n",
|
||||
"{'description': 'The foo.',\n",
|
||||
" 'properties': {'bar': {'description': 'The bar.',\n",
|
||||
" 'title': 'Bar',\n",
|
||||
" 'type': 'string'},\n",
|
||||
" 'baz': {'title': 'Baz', 'description': 'The baz.', 'type': 'integer'}},\n",
|
||||
" 'required': ['bar', 'baz']}"
|
||||
" 'baz': {'description': 'The baz.', 'title': 'Baz', 'type': 'integer'}},\n",
|
||||
" 'required': ['bar', 'baz'],\n",
|
||||
" 'title': 'fooSchema',\n",
|
||||
" 'type': 'object'}"
|
||||
]
|
||||
},
|
||||
"execution_count": 5,
|
||||
@@ -277,7 +312,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "564fbe6f-11df-402d-b135-ef6ff25e1e63",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.051302Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.051218Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.059704Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.059490Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@@ -320,7 +362,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"id": "6bc055d4-1fbe-4db5-8881-9c382eba6b1b",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.060971Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.060883Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.064615Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.064408Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@@ -329,7 +378,7 @@
|
||||
"6\n",
|
||||
"Calculator\n",
|
||||
"multiply numbers\n",
|
||||
"{'a': {'title': 'A', 'description': 'first number', 'type': 'integer'}, 'b': {'title': 'B', 'description': 'second number', 'type': 'integer'}}\n"
|
||||
"{'a': {'description': 'first number', 'title': 'A', 'type': 'integer'}, 'b': {'description': 'second number', 'title': 'B', 'type': 'integer'}}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
@@ -373,17 +422,32 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"execution_count": 8,
|
||||
"id": "8ef593c5-cf72-4c10-bfc9-7d21874a0c24",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.065797Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.065733Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.130458Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.130229Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"/var/folders/4j/2rz3865x6qg07tx43146py8h0000gn/T/ipykernel_95770/2548361071.py:14: LangChainBetaWarning: This API is in beta and may change in the future.\n",
|
||||
" as_tool = chain.as_tool(\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'answer_style': {'title': 'Answer Style', 'type': 'string'}}"
|
||||
]
|
||||
},
|
||||
"execution_count": 9,
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -428,19 +492,26 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"execution_count": 9,
|
||||
"id": "1dad8f8e",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.131904Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.131803Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.136797Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.136563Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from typing import Optional, Type\n",
|
||||
"\n",
|
||||
"from langchain.pydantic_v1 import BaseModel\n",
|
||||
"from langchain_core.callbacks import (\n",
|
||||
" AsyncCallbackManagerForToolRun,\n",
|
||||
" CallbackManagerForToolRun,\n",
|
||||
")\n",
|
||||
"from langchain_core.tools import BaseTool\n",
|
||||
"from pydantic import BaseModel\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class CalculatorInput(BaseModel):\n",
|
||||
@@ -448,9 +519,11 @@
|
||||
" b: int = Field(description=\"second number\")\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Note: It's important that every field has type hints. BaseTool is a\n",
|
||||
"# Pydantic class and not having type hints can lead to unexpected behavior.\n",
|
||||
"class CustomCalculatorTool(BaseTool):\n",
|
||||
" name = \"Calculator\"\n",
|
||||
" description = \"useful for when you need to answer questions about math\"\n",
|
||||
" name: str = \"Calculator\"\n",
|
||||
" description: str = \"useful for when you need to answer questions about math\"\n",
|
||||
" args_schema: Type[BaseModel] = CalculatorInput\n",
|
||||
" return_direct: bool = True\n",
|
||||
"\n",
|
||||
@@ -477,9 +550,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"execution_count": 10,
|
||||
"id": "bb551c33",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.138074Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.138007Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.141360Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.141158Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@@ -487,7 +567,7 @@
|
||||
"text": [
|
||||
"Calculator\n",
|
||||
"useful for when you need to answer questions about math\n",
|
||||
"{'a': {'title': 'A', 'description': 'first number', 'type': 'integer'}, 'b': {'title': 'B', 'description': 'second number', 'type': 'integer'}}\n",
|
||||
"{'a': {'description': 'first number', 'title': 'A', 'type': 'integer'}, 'b': {'description': 'second number', 'title': 'B', 'type': 'integer'}}\n",
|
||||
"True\n",
|
||||
"6\n",
|
||||
"6\n"
|
||||
@@ -528,9 +608,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 12,
|
||||
"execution_count": 11,
|
||||
"id": "6615cb77-fd4c-4676-8965-f92cc71d4944",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.142587Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.142504Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.147205Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.146995Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@@ -560,9 +647,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"execution_count": 12,
|
||||
"id": "bb2af583-eadd-41f4-a645-bf8748bd3dcd",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.148383Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.148307Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.152684Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.152486Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@@ -605,9 +699,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 14,
|
||||
"execution_count": 13,
|
||||
"id": "4ad0932c-8610-4278-8c57-f9218f654c8a",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.153849Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.153773Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.158312Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.158130Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@@ -650,9 +751,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 15,
|
||||
"execution_count": 14,
|
||||
"id": "7094c0e8-6192-4870-a942-aad5b5ae48fd",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.159440Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.159364Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.160922Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.160712Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.tools import ToolException\n",
|
||||
@@ -673,9 +781,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 16,
|
||||
"execution_count": 15,
|
||||
"id": "b4d22022-b105-4ccc-a15b-412cb9ea3097",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.162046Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.161968Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.165236Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.165052Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
@@ -683,7 +798,7 @@
|
||||
"'Error: There is no city by the name of foobar.'"
|
||||
]
|
||||
},
|
||||
"execution_count": 16,
|
||||
"execution_count": 15,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -707,9 +822,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 17,
|
||||
"execution_count": 16,
|
||||
"id": "3fad1728-d367-4e1b-9b54-3172981271cf",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.166372Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.166294Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.169739Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.169553Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
@@ -717,7 +839,7 @@
|
||||
"\"There is no such city, but it's probably above 0K there!\""
|
||||
]
|
||||
},
|
||||
"execution_count": 17,
|
||||
"execution_count": 16,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -741,9 +863,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 18,
|
||||
"execution_count": 17,
|
||||
"id": "ebfe7c1f-318d-4e58-99e1-f31e69473c46",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.170937Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.170859Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.174498Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.174304Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
@@ -751,7 +880,7 @@
|
||||
"'The following errors occurred during tool execution: `Error: There is no city by the name of foobar.`'"
|
||||
]
|
||||
},
|
||||
"execution_count": 18,
|
||||
"execution_count": 17,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -791,9 +920,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"execution_count": 18,
|
||||
"id": "14905425-0334-43a0-9de9-5bcf622ede0e",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.175683Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.175605Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.178798Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.178601Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import random\n",
|
||||
@@ -820,9 +956,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"execution_count": 19,
|
||||
"id": "0f2e1528-404b-46e6-b87c-f0957c4b9217",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.179881Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.179807Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.182100Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.181940Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
@@ -830,7 +973,7 @@
|
||||
"'Successfully generated array of 10 random ints in [0, 9].'"
|
||||
]
|
||||
},
|
||||
"execution_count": 9,
|
||||
"execution_count": 19,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -849,17 +992,24 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"execution_count": 20,
|
||||
"id": "cc197777-26eb-46b3-a83b-c2ce116c6311",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.183238Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.183170Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.185752Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.185567Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"ToolMessage(content='Successfully generated array of 10 random ints in [0, 9].', name='generate_random_ints', tool_call_id='123', artifact=[1, 4, 2, 5, 3, 9, 0, 4, 7, 7])"
|
||||
"ToolMessage(content='Successfully generated array of 10 random ints in [0, 9].', name='generate_random_ints', tool_call_id='123', artifact=[4, 8, 2, 4, 1, 0, 9, 5, 8, 1])"
|
||||
]
|
||||
},
|
||||
"execution_count": 3,
|
||||
"execution_count": 20,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -885,9 +1035,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": 21,
|
||||
"id": "fe1a09d1-378b-4b91-bb5e-0697c3d7eb92",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.186884Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.186803Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.190718Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.190494Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.tools import BaseTool\n",
|
||||
@@ -917,17 +1074,24 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"execution_count": 22,
|
||||
"id": "8c3d16f6-1c4a-48ab-b05a-38547c592e79",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:25:53.191872Z",
|
||||
"iopub.status.busy": "2024-09-10T20:25:53.191794Z",
|
||||
"iopub.status.idle": "2024-09-10T20:25:53.194396Z",
|
||||
"shell.execute_reply": "2024-09-10T20:25:53.194184Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"ToolMessage(content='Generated 3 floats in [0.1, 3.3333], rounded to 4 decimals.', name='generate_random_floats', tool_call_id='123', artifact=[1.4277, 0.7578, 2.4871])"
|
||||
"ToolMessage(content='Generated 3 floats in [0.1, 3.3333], rounded to 4 decimals.', name='generate_random_floats', tool_call_id='123', artifact=[1.5566, 0.5134, 2.7914])"
|
||||
]
|
||||
},
|
||||
"execution_count": 8,
|
||||
"execution_count": 22,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
|
||||
@@ -29,9 +29,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"execution_count": 1,
|
||||
"id": "89579144-bcb3-490a-8036-86a0a6bcd56b",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:41.780410Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:41.780102Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:42.147112Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:42.146838Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
|
||||
@@ -67,17 +74,24 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"execution_count": 2,
|
||||
"id": "610c3025-ea63-4cd7-88bd-c8cbcb4d8a3f",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:42.148746Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:42.148621Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:42.162044Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:42.161794Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"ChatPromptValue(messages=[SystemMessage(content=\"You are an expert extraction algorithm. Only extract relevant information from the text. If you do not know the value of an attribute asked to extract, return null for the attribute's value.\"), HumanMessage(content='testing 1 2 3'), HumanMessage(content='this is some text')])"
|
||||
"ChatPromptValue(messages=[SystemMessage(content=\"You are an expert extraction algorithm. Only extract relevant information from the text. If you do not know the value of an attribute asked to extract, return null for the attribute's value.\", additional_kwargs={}, response_metadata={}), HumanMessage(content='testing 1 2 3', additional_kwargs={}, response_metadata={}), HumanMessage(content='this is some text', additional_kwargs={}, response_metadata={})])"
|
||||
]
|
||||
},
|
||||
"execution_count": 3,
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -104,15 +118,22 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"execution_count": 3,
|
||||
"id": "d875a49a-d2cb-4b9e-b5bf-41073bc3905c",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:42.163477Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:42.163391Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:42.324449Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:42.324206Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from typing import List, Optional\n",
|
||||
"\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Person(BaseModel):\n",
|
||||
@@ -162,9 +183,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"execution_count": 4,
|
||||
"id": "08356810-77ce-4e68-99d9-faa0326f2cee",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:42.326100Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:42.326016Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:42.329260Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:42.329014Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import uuid\n",
|
||||
@@ -177,7 +205,7 @@
|
||||
" SystemMessage,\n",
|
||||
" ToolMessage,\n",
|
||||
")\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Example(TypedDict):\n",
|
||||
@@ -238,9 +266,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": 5,
|
||||
"id": "7f59a745-5c81-4011-a4c5-a33ec1eca7ef",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:42.330580Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:42.330488Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:42.332813Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:42.332598Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"examples = [\n",
|
||||
@@ -273,22 +308,29 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"execution_count": 6,
|
||||
"id": "976bb7b8-09c4-4a3e-80df-49a483705c08",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:42.333955Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:42.333876Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:42.336841Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:42.336635Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"system: content=\"You are an expert extraction algorithm. Only extract relevant information from the text. If you do not know the value of an attribute asked to extract, return null for the attribute's value.\"\n",
|
||||
"human: content=\"The ocean is vast and blue. It's more than 20,000 feet deep. There are many fish in it.\"\n",
|
||||
"ai: content='' tool_calls=[{'name': 'Person', 'args': {'name': None, 'hair_color': None, 'height_in_meters': None}, 'id': 'b843ba77-4c9c-48ef-92a4-54e534f24521'}]\n",
|
||||
"tool: content='You have correctly called this tool.' tool_call_id='b843ba77-4c9c-48ef-92a4-54e534f24521'\n",
|
||||
"human: content='Fiona traveled far from France to Spain.'\n",
|
||||
"ai: content='' tool_calls=[{'name': 'Person', 'args': {'name': 'Fiona', 'hair_color': None, 'height_in_meters': None}, 'id': '46f00d6b-50e5-4482-9406-b07bb10340f6'}]\n",
|
||||
"tool: content='You have correctly called this tool.' tool_call_id='46f00d6b-50e5-4482-9406-b07bb10340f6'\n",
|
||||
"human: content='this is some text'\n"
|
||||
"system: content=\"You are an expert extraction algorithm. Only extract relevant information from the text. If you do not know the value of an attribute asked to extract, return null for the attribute's value.\" additional_kwargs={} response_metadata={}\n",
|
||||
"human: content=\"The ocean is vast and blue. It's more than 20,000 feet deep. There are many fish in it.\" additional_kwargs={} response_metadata={}\n",
|
||||
"ai: content='' additional_kwargs={} response_metadata={} tool_calls=[{'name': 'Data', 'args': {'people': []}, 'id': '240159b1-1405-4107-a07c-3c6b91b3d5b7', 'type': 'tool_call'}]\n",
|
||||
"tool: content='You have correctly called this tool.' tool_call_id='240159b1-1405-4107-a07c-3c6b91b3d5b7'\n",
|
||||
"human: content='Fiona traveled far from France to Spain.' additional_kwargs={} response_metadata={}\n",
|
||||
"ai: content='' additional_kwargs={} response_metadata={} tool_calls=[{'name': 'Data', 'args': {'people': [{'name': 'Fiona', 'hair_color': None, 'height_in_meters': None}]}, 'id': '3fc521e4-d1d2-4c20-bf40-e3d72f1068da', 'type': 'tool_call'}]\n",
|
||||
"tool: content='You have correctly called this tool.' tool_call_id='3fc521e4-d1d2-4c20-bf40-e3d72f1068da'\n",
|
||||
"human: content='this is some text' additional_kwargs={} response_metadata={}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
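The message dump above is produced by converting reference examples into chat messages. A rough sketch of that conversion, with names mirroring the notebook (the helper is defined in the notebook itself, not a library API), might be:

```python
# Sketch: turn an (input text, desired tool calls) example into few-shot messages.
# Each pydantic instance becomes a tool call on the AI message, followed by a
# ToolMessage acknowledging the call, matching the printout above.
import uuid
from typing import List, TypedDict

from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, ToolMessage
from pydantic import BaseModel


class Example(TypedDict):
    input: str  # the example input text
    tool_calls: List[BaseModel]  # pydantic instances representing the desired output


def tool_example_to_messages(example: Example) -> List[BaseMessage]:
    messages: List[BaseMessage] = [HumanMessage(content=example["input"])]
    tool_calls = [
        {
            "id": str(uuid.uuid4()),
            "args": call.model_dump(),
            "name": call.__class__.__name__,
            "type": "tool_call",
        }
        for call in example["tool_calls"]
    ]
    messages.append(AIMessage(content="", tool_calls=tool_calls))
    messages.extend(
        ToolMessage(content="You have correctly called this tool.", tool_call_id=tc["id"])
        for tc in tool_calls
    )
    return messages
```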
@@ -320,9 +362,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"execution_count": 7,
|
||||
"id": "df2e1ee1-69e8-4c4d-b349-95f2e320317b",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:42.338001Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:42.337915Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:42.349121Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:42.348908Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# | output: false\n",
|
||||
@@ -343,9 +392,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"execution_count": 8,
|
||||
"id": "dbfea43d-769b-42e9-a76f-ce722f7d6f93",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:42.350335Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:42.350264Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:42.424894Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:42.424623Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"runnable = prompt | llm.with_structured_output(\n",
|
||||
@@ -367,18 +423,49 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"execution_count": 9,
|
||||
"id": "66545cab-af2a-40a4-9dc9-b4110458b7d3",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:42.426258Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:42.426187Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:46.151633Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:46.150690Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[Person(name='earth', hair_color='null', height_in_meters='null')]\n",
|
||||
"people=[Person(name='earth', hair_color='null', height_in_meters='null')]\n",
|
||||
"people=[]\n",
|
||||
"people=[Person(name='earth', hair_color='null', height_in_meters='null')]\n",
|
||||
"people=[]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[]\n"
|
||||
]
|
||||
}
|
||||
@@ -401,18 +488,49 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"execution_count": 10,
|
||||
"id": "1c09d805-ec16-4123-aef9-6a5b59499b5c",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:46.155346Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:46.155110Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:51.810359Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:51.809636Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[]\n",
|
||||
"people=[]\n",
|
||||
"people=[]\n",
|
||||
"people=[]\n",
|
||||
"people=[]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"people=[]\n"
|
||||
]
|
||||
}
|
||||
@@ -435,9 +553,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 12,
|
||||
"execution_count": 11,
|
||||
"id": "a9b7a762-1b75-4f9f-b9d9-6732dd05802c",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:26:51.813309Z",
|
||||
"iopub.status.busy": "2024-09-10T20:26:51.813150Z",
|
||||
"iopub.status.idle": "2024-09-10T20:26:53.474153Z",
|
||||
"shell.execute_reply": "2024-09-10T20:26:53.473522Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
@@ -445,7 +570,7 @@
|
||||
"Data(people=[Person(name='Harrison', hair_color='black', height_in_meters=None)])"
|
||||
]
|
||||
},
|
||||
"execution_count": 12,
|
||||
"execution_count": 11,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -476,7 +601,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.4"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -23,16 +23,56 @@
|
||||
"id": "57969139-ad0a-487e-97d8-cb30e2af9742",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Set up\n",
|
||||
"## Setup\n",
|
||||
"\n",
|
||||
"We need some example data! Let's download an article about [cars from wikipedia](https://en.wikipedia.org/wiki/Car) and load it as a LangChain [Document](https://python.langchain.com/v0.2/api_reference/core/documents/langchain_core.documents.base.Document.html)."
|
||||
"First we'll install the dependencies needed for this guide:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "84460db2-36e1-4037-bfa6-2a11883c2ba5",
|
||||
"id": "a3b4d838-5be4-4207-8a4a-9ef5624c48f2",
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:19.850767Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:19.850427Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:21.432233Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:21.431606Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Note: you may need to restart the kernel to use updated packages.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"%pip install -qU langchain-community lxml faiss-cpu langchain-openai"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "ac000b03-33fc-414f-8f2c-3850df621a35",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Now we need some example data! Let's download an article about [cars from wikipedia](https://en.wikipedia.org/wiki/Car) and load it as a LangChain [Document](https://python.langchain.com/v0.2/api_reference/core/documents/langchain_core.documents.base.Document.html)."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "84460db2-36e1-4037-bfa6-2a11883c2ba5",
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:21.434882Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:21.434571Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:22.214545Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:22.214253Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import re\n",
|
||||
@@ -55,15 +95,22 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"execution_count": 3,
|
||||
"id": "fcb6917b-123d-4630-a0ce-ed8b293d482d",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:22.216143Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:22.216039Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:22.218117Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:22.217854Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"79174\n"
|
||||
"80427\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
@@ -87,13 +134,20 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "a3b288ed-87a6-4af0-aac8-20921dc370d4",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:22.219468Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:22.219395Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:22.340594Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:22.340319Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from typing import List, Optional\n",
|
||||
"\n",
|
||||
"from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class KeyDevelopment(BaseModel):\n",
|
||||
@@ -156,7 +210,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "109f4f05-d0ff-431d-93d9-8f5aa34979a6",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:22.342277Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:22.342171Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:22.532302Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:22.532034Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# | output: false\n",
|
||||
@@ -171,7 +232,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "aa4ae224-6d3d-4fe2-b210-7db19a9fe580",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:22.533795Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:22.533708Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:22.610573Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:22.610307Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"extractor = prompt | llm.with_structured_output(\n",
|
||||
@@ -194,7 +262,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"id": "27b8a373-14b3-45ea-8bf5-9749122ad927",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:22.612123Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:22.612052Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:22.753493Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:22.753179Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_text_splitters import TokenTextSplitter\n",
|
||||
@@ -227,7 +302,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"id": "6ba766b5-8d6c-48e6-8d69-f391a66b65d2",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:22.755067Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:22.754987Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:36.691130Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:36.690500Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Limit just to the first 3 chunks\n",
|
||||
@@ -254,21 +336,27 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"id": "c3f77470-ce6c-477f-8957-650913218632",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:36.694799Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:36.694458Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:36.701416Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:36.700993Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"[KeyDevelopment(year=1966, description='The Toyota Corolla began production, becoming the best-selling series of automobile in history.', evidence='The Toyota Corolla, which has been in production since 1966, is the best-selling series of automobile in history.'),\n",
|
||||
" KeyDevelopment(year=1769, description='Nicolas-Joseph Cugnot built the first steam-powered road vehicle.', evidence='The French inventor Nicolas-Joseph Cugnot built the first steam-powered road vehicle in 1769.'),\n",
|
||||
" KeyDevelopment(year=1808, description='François Isaac de Rivaz designed and constructed the first internal combustion-powered automobile.', evidence='the Swiss inventor François Isaac de Rivaz designed and constructed the first internal combustion-powered automobile in 1808.'),\n",
|
||||
" KeyDevelopment(year=1886, description='Carl Benz patented his Benz Patent-Motorwagen, inventing the modern car.', evidence='The modern car—a practical, marketable automobile for everyday use—was invented in 1886, when the German inventor Carl Benz patented his Benz Patent-Motorwagen.'),\n",
|
||||
" KeyDevelopment(year=1908, description='Ford Model T, one of the first cars affordable by the masses, began production.', evidence='One of the first cars affordable by the masses was the Ford Model T, begun in 1908, an American car manufactured by the Ford Motor Company.'),\n",
|
||||
" KeyDevelopment(year=1888, description=\"Bertha Benz undertook the first road trip by car to prove the road-worthiness of her husband's invention.\", evidence=\"In August 1888, Bertha Benz, the wife of Carl Benz, undertook the first road trip by car, to prove the road-worthiness of her husband's invention.\"),\n",
|
||||
"[KeyDevelopment(year=1769, description='Nicolas-Joseph Cugnot built the first full-scale, self-propelled mechanical vehicle, a steam-powered tricycle.', evidence='Nicolas-Joseph Cugnot is widely credited with building the first full-scale, self-propelled mechanical vehicle in about 1769; he created a steam-powered tricycle.'),\n",
|
||||
" KeyDevelopment(year=1807, description=\"Nicéphore Niépce and his brother Claude created what was probably the world's first internal combustion engine.\", evidence=\"In 1807, Nicéphore Niépce and his brother Claude created what was probably the world's first internal combustion engine (which they called a Pyréolophore), but installed it in a boat on the river Saone in France.\"),\n",
|
||||
" KeyDevelopment(year=1886, description='Carl Benz patented the Benz Patent-Motorwagen, marking the birth of the modern car.', evidence='In November 1881, French inventor Gustave Trouvé demonstrated a three-wheeled car powered by electricity at the International Exposition of Electricity. Although several other German engineers (including Gottlieb Daimler, Wilhelm Maybach, and Siegfried Marcus) were working on cars at about the same time, the year 1886 is regarded as the birth year of the modern car—a practical, marketable automobile for everyday use—when the German Carl Benz patented his Benz Patent-Motorwagen; he is generally acknowledged as the inventor of the car.'),\n",
|
||||
" KeyDevelopment(year=1886, description='Carl Benz began promotion of his vehicle, marking the introduction of the first commercially available automobile.', evidence='Benz began promotion of the vehicle on 3 July 1886.'),\n",
|
||||
" KeyDevelopment(year=1888, description=\"Bertha Benz undertook the first road trip by car to prove the road-worthiness of her husband's invention.\", evidence=\"In August 1888, Bertha Benz, the wife and business partner of Carl Benz, undertook the first road trip by car, to prove the road-worthiness of her husband's invention.\"),\n",
|
||||
" KeyDevelopment(year=1896, description='Benz designed and patented the first internal-combustion flat engine, called boxermotor.', evidence='In 1896, Benz designed and patented the first internal-combustion flat engine, called boxermotor.'),\n",
|
||||
" KeyDevelopment(year=1897, description='Nesselsdorfer Wagenbau produced the Präsident automobil, one of the first factory-made cars in the world.', evidence='The first motor car in central Europe and one of the first factory-made cars in the world, was produced by Czech company Nesselsdorfer Wagenbau (later renamed to Tatra) in 1897, the Präsident automobil.'),\n",
|
||||
" KeyDevelopment(year=1890, description='Daimler Motoren Gesellschaft (DMG) was founded by Daimler and Maybach in Cannstatt.', evidence='Daimler and Maybach founded Daimler Motoren Gesellschaft (DMG) in Cannstatt in 1890.'),\n",
|
||||
" KeyDevelopment(year=1891, description='Auguste Doriot and Louis Rigoulot completed the longest trip by a petrol-driven vehicle with a Daimler powered Peugeot Type 3.', evidence='In 1891, Auguste Doriot and his Peugeot colleague Louis Rigoulot completed the longest trip by a petrol-driven vehicle when their self-designed and built Daimler powered Peugeot Type 3 completed 2,100 kilometres (1,300 mi) from Valentigney to Paris and Brest and back again.')]"
|
||||
" KeyDevelopment(year=1897, description='The first motor car in central Europe and one of the first factory-made cars in the world, the Präsident automobil, was produced by Nesselsdorfer Wagenbau.', evidence='The first motor car in central Europe and one of the first factory-made cars in the world, was produced by Czech company Nesselsdorfer Wagenbau (later renamed to Tatra) in 1897, the Präsident automobil.'),\n",
|
||||
" KeyDevelopment(year=1901, description='Ransom Olds started large-scale, production-line manufacturing of affordable cars at his Oldsmobile factory in Lansing, Michigan.', evidence='Large-scale, production-line manufacturing of affordable cars was started by Ransom Olds in 1901 at his Oldsmobile factory in Lansing, Michigan.'),\n",
|
||||
" KeyDevelopment(year=1913, description=\"Henry Ford introduced the world's first moving assembly line for cars at the Highland Park Ford Plant.\", evidence=\"This concept was greatly expanded by Henry Ford, beginning in 1913 with the world's first moving assembly line for cars at the Highland Park Ford Plant.\")]"
|
||||
]
|
||||
},
|
||||
"execution_count": 9,
|
||||
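The batch of `KeyDevelopment` results above comes from splitting the article into chunks and running the extractor over them concurrently. A sketch of that step, assuming the `document` and `extractor` objects from the earlier cells and a container model exposing a `key_developments` list, as in the notebook:

```python
# Sketch: chunk the long document, extract from the first few chunks in parallel,
# then flatten the per-chunk results into one list of key developments.
from langchain_text_splitters import TokenTextSplitter

text_splitter = TokenTextSplitter(chunk_size=2000, chunk_overlap=20)
texts = text_splitter.split_text(document.page_content)

first_few = texts[:3]  # limit just to the first 3 chunks to keep the run cheap
extractions = extractor.batch(
    [{"text": text} for text in first_few],
    {"max_concurrency": 5},  # run the chunks concurrently
)

key_developments = []
for extraction in extractions:
    key_developments.extend(extraction.key_developments)
```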
@@ -315,7 +403,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"id": "aaf37c82-625b-4fa1-8e88-73303f08ac16",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:36.703897Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:36.703718Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:38.451523Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:38.450925Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_community.vectorstores import FAISS\n",
|
||||
@@ -344,7 +439,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"id": "47aad00b-7013-4f7f-a1b0-02ef269093bf",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:38.455094Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:38.454851Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:38.458315Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:38.457940Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rag_extractor = {\n",
|
||||
@@ -356,7 +458,14 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 12,
|
||||
"id": "68f2de01-0cd8-456e-a959-db236189d41b",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:38.460115Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:38.459949Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:43.195532Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:43.194254Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"results = rag_extractor.invoke(\"Key developments associated with cars\")"
|
||||
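The `rag_extractor` assembled above pairs a FAISS retriever with the extraction chain so that only the most relevant chunk is passed to the model. A sketch of that composition, assuming `texts` and `extractor` from the earlier cells:

```python
# Sketch: index the text chunks in FAISS, retrieve the single most relevant one,
# and feed its content to the structured-output extractor.
from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document
from langchain_openai import OpenAIEmbeddings

vectorstore = FAISS.from_documents(
    [Document(page_content=t) for t in texts],
    embedding=OpenAIEmbeddings(),
)
retriever = vectorstore.as_retriever(search_kwargs={"k": 1})  # only the top chunk

rag_extractor = {
    "text": retriever | (lambda docs: docs[0].page_content)  # content of the top doc
} | extractor

results = rag_extractor.invoke("Key developments associated with cars")
```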
@@ -366,15 +475,21 @@
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"id": "1788e2d6-77bb-417f-827c-eb96c035164e",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:43.200497Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:43.200037Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:43.206773Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:43.205426Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"year=1869 description='Mary Ward became one of the first documented car fatalities in Parsonstown, Ireland.' evidence='Mary Ward became one of the first documented car fatalities in 1869 in Parsonstown, Ireland,'\n",
|
||||
"year=1899 description=\"Henry Bliss one of the US's first pedestrian car casualties in New York City.\" evidence=\"Henry Bliss one of the US's first pedestrian car casualties in 1899 in New York City.\"\n",
|
||||
"year=2030 description='All fossil fuel vehicles will be banned in Amsterdam.' evidence='all fossil fuel vehicles will be banned in Amsterdam from 2030.'\n"
|
||||
"year=2006 description='Car-sharing services in the US experienced double-digit growth in revenue and membership.' evidence='in the US, some car-sharing services have experienced double-digit growth in revenue and membership growth between 2006 and 2007.'\n",
|
||||
"year=2020 description='56 million cars were manufactured worldwide, with China producing the most.' evidence='In 2020, there were 56 million cars manufactured worldwide, down from 67 million the previous year. The automotive industry in China produces by far the most (20 million in 2020).'\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
@@ -416,7 +531,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.4"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -27,9 +27,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"execution_count": 1,
|
||||
"id": "25487939-8713-4ec7-b774-e4a761ac8298",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:44.442501Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:44.442044Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:44.872217Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:44.871897Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# | output: false\n",
|
||||
@@ -62,16 +69,23 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"execution_count": 2,
|
||||
"id": "497eb023-c043-443d-ac62-2d4ea85fe1b0",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:44.873979Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:44.873840Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:44.878966Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:44.878718Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from typing import List, Optional\n",
|
||||
"\n",
|
||||
"from langchain_core.output_parsers import PydanticOutputParser\n",
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field, validator\n",
|
||||
"from pydantic import BaseModel, Field, validator\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Person(BaseModel):\n",
|
||||
@@ -114,9 +128,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"execution_count": 3,
|
||||
"id": "20b99ffb-a114-49a9-a7be-154c525f8ada",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:44.880355Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:44.880277Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:44.881834Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:44.881601Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"query = \"Anna is 23 years old and she is 6 feet tall\""
|
||||
@@ -124,9 +145,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"execution_count": 4,
|
||||
"id": "4f3a66ce-de19-4571-9e54-67504ae3fba7",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:44.883138Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:44.883049Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:44.885139Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:44.884801Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
@@ -140,7 +168,7 @@
|
||||
"\n",
|
||||
"Here is the output schema:\n",
|
||||
"```\n",
|
||||
"{\"description\": \"Identifying information about all people in a text.\", \"properties\": {\"people\": {\"title\": \"People\", \"type\": \"array\", \"items\": {\"$ref\": \"#/definitions/Person\"}}}, \"required\": [\"people\"], \"definitions\": {\"Person\": {\"title\": \"Person\", \"description\": \"Information about a person.\", \"type\": \"object\", \"properties\": {\"name\": {\"title\": \"Name\", \"description\": \"The name of the person\", \"type\": \"string\"}, \"height_in_meters\": {\"title\": \"Height In Meters\", \"description\": \"The height of the person expressed in meters.\", \"type\": \"number\"}}, \"required\": [\"name\", \"height_in_meters\"]}}}\n",
|
||||
"{\"$defs\": {\"Person\": {\"description\": \"Information about a person.\", \"properties\": {\"name\": {\"description\": \"The name of the person\", \"title\": \"Name\", \"type\": \"string\"}, \"height_in_meters\": {\"description\": \"The height of the person expressed in meters.\", \"title\": \"Height In Meters\", \"type\": \"number\"}}, \"required\": [\"name\", \"height_in_meters\"], \"title\": \"Person\", \"type\": \"object\"}}, \"description\": \"Identifying information about all people in a text.\", \"properties\": {\"people\": {\"items\": {\"$ref\": \"#/$defs/Person\"}, \"title\": \"People\", \"type\": \"array\"}}, \"required\": [\"people\"]}\n",
|
||||
"```\n",
|
||||
"Human: Anna is 23 years old and she is 6 feet tall\n"
|
||||
]
|
||||
@@ -160,9 +188,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": 5,
|
||||
"id": "7e0041eb-37dc-4384-9fe3-6dd8c356371e",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:44.886765Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:44.886675Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:46.835960Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:46.835282Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
@@ -170,7 +205,7 @@
|
||||
"People(people=[Person(name='Anna', height_in_meters=1.83)])"
|
||||
]
|
||||
},
|
||||
"execution_count": 6,
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -209,9 +244,16 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"execution_count": 6,
|
||||
"id": "b1f11912-c1bb-4a2a-a482-79bf3996961f",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:46.839577Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:46.839233Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:46.849663Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:46.849177Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import json\n",
|
||||
@@ -221,7 +263,7 @@
|
||||
"from langchain_anthropic.chat_models import ChatAnthropic\n",
|
||||
"from langchain_core.messages import AIMessage\n",
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field, validator\n",
|
||||
"from pydantic import BaseModel, Field, validator\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Person(BaseModel):\n",
|
||||
@@ -279,16 +321,23 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"execution_count": 7,
|
||||
"id": "9260d5e8-3b6c-4639-9f3b-fb2f90239e4b",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:46.851870Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:46.851698Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:46.854786Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:46.854424Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"System: Answer the user query. Output your answer as JSON that matches the given schema: ```json\n",
|
||||
"{'title': 'People', 'description': 'Identifying information about all people in a text.', 'type': 'object', 'properties': {'people': {'title': 'People', 'type': 'array', 'items': {'$ref': '#/definitions/Person'}}}, 'required': ['people'], 'definitions': {'Person': {'title': 'Person', 'description': 'Information about a person.', 'type': 'object', 'properties': {'name': {'title': 'Name', 'description': 'The name of the person', 'type': 'string'}, 'height_in_meters': {'title': 'Height In Meters', 'description': 'The height of the person expressed in meters.', 'type': 'number'}}, 'required': ['name', 'height_in_meters']}}}\n",
|
||||
"{'$defs': {'Person': {'description': 'Information about a person.', 'properties': {'name': {'description': 'The name of the person', 'title': 'Name', 'type': 'string'}, 'height_in_meters': {'description': 'The height of the person expressed in meters.', 'title': 'Height In Meters', 'type': 'number'}}, 'required': ['name', 'height_in_meters'], 'title': 'Person', 'type': 'object'}}, 'description': 'Identifying information about all people in a text.', 'properties': {'people': {'items': {'$ref': '#/$defs/Person'}, 'title': 'People', 'type': 'array'}}, 'required': ['people'], 'title': 'People', 'type': 'object'}\n",
|
||||
"```. Make sure to wrap the answer in ```json and ``` tags\n",
|
||||
"Human: Anna is 23 years old and she is 6 feet tall\n"
|
||||
]
|
||||
@@ -301,17 +350,32 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"execution_count": 8,
|
||||
"id": "c523301d-ae0e-45e3-b195-7fd28c67a5c4",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:35:46.856945Z",
|
||||
"iopub.status.busy": "2024-09-10T20:35:46.856769Z",
|
||||
"iopub.status.idle": "2024-09-10T20:35:48.373728Z",
|
||||
"shell.execute_reply": "2024-09-10T20:35:48.373079Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"/Users/bagatur/langchain/.venv/lib/python3.11/site-packages/pydantic/_internal/_fields.py:201: UserWarning: Field name \"schema\" in \"PromptInput\" shadows an attribute in parent \"BaseModel\"\n",
|
||||
" warnings.warn(\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"[{'people': [{'name': 'Anna', 'height_in_meters': 1.83}]}]"
|
||||
]
|
||||
},
|
||||
"execution_count": 9,
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -349,7 +413,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.4"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
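The hunks above cover parsing model output without function calling: the prompt embeds a JSON schema and asks the model to wrap its answer in json-fenced code blocks. A sketch of the parsing step that produces the `[{'people': [{'name': 'Anna', 'height_in_meters': 1.83}]}]` result, with the helper name taken from the notebook:

```python
# Sketch: pull every JSON object out of fenced ```json ... ``` blocks in the raw
# model output, mirroring the "wrap the answer in json tags" prompt shown above.
import json
import re
from typing import List

from langchain_core.messages import AIMessage


def extract_json(message: AIMessage) -> List[dict]:
    """Extract all JSON objects embedded in fenced json blocks of the message."""
    text = message.content
    pattern = r"```json(.*?)```"
    matches = re.findall(pattern, text, re.DOTALL)
    try:
        return [json.loads(match.strip()) for match in matches]
    except json.JSONDecodeError as exc:
        raise ValueError(f"Failed to parse model output: {text}") from exc
```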
@@ -136,7 +136,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Note that the docstrings here are crucial, as they will be passed along\n",
|
||||
@@ -696,7 +696,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.1"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -163,8 +163,8 @@
|
||||
"from typing import List, Optional\n",
|
||||
"\n",
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"llm = ChatOpenAI(model=\"gpt-3.5-turbo\", temperature=0)\n",
|
||||
"\n",
|
||||
|
||||
@@ -177,14 +177,15 @@
|
||||
"source": [
|
||||
"from typing import Optional, Type\n",
|
||||
"\n",
|
||||
"# Import things that are needed generically\n",
|
||||
"from langchain.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_core.callbacks import (\n",
|
||||
" AsyncCallbackManagerForToolRun,\n",
|
||||
" CallbackManagerForToolRun,\n",
|
||||
")\n",
|
||||
"from langchain_core.tools import BaseTool\n",
|
||||
"\n",
|
||||
"# Import things that are needed generically\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"description_query = \"\"\"\n",
|
||||
"MATCH (m:Movie|Person)\n",
|
||||
"WHERE m.title CONTAINS $candidate OR m.name CONTAINS $candidate\n",
|
||||
@@ -226,14 +227,15 @@
|
||||
"source": [
|
||||
"from typing import Optional, Type\n",
|
||||
"\n",
|
||||
"# Import things that are needed generically\n",
|
||||
"from langchain.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_core.callbacks import (\n",
|
||||
" AsyncCallbackManagerForToolRun,\n",
|
||||
" CallbackManagerForToolRun,\n",
|
||||
")\n",
|
||||
"from langchain_core.tools import BaseTool\n",
|
||||
"\n",
|
||||
"# Import things that are needed generically\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class InformationInput(BaseModel):\n",
|
||||
" entity: str = Field(description=\"movie or a person mentioned in the question\")\n",
|
||||
|
||||
@@ -440,7 +440,7 @@
|
||||
"source": [
|
||||
"from typing import List\n",
|
||||
"\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class HypotheticalQuestions(BaseModel):\n",
|
||||
|
||||
@@ -24,8 +24,8 @@
|
||||
"from typing import List\n",
|
||||
"\n",
|
||||
"from langchain_core.output_parsers import PydanticOutputParser\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_openai import ChatOpenAI"
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from pydantic import BaseModel, Field"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -71,8 +71,8 @@
|
||||
"source": [
|
||||
"from langchain_core.output_parsers import JsonOutputParser\n",
|
||||
"from langchain_core.prompts import PromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"model = ChatOpenAI(temperature=0)\n",
|
||||
"\n",
|
||||
|
||||
@@ -20,8 +20,8 @@
|
||||
"from langchain.output_parsers import OutputFixingParser\n",
|
||||
"from langchain_core.output_parsers import PydanticOutputParser\n",
|
||||
"from langchain_core.prompts import PromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_openai import ChatOpenAI, OpenAI"
|
||||
"from langchain_openai import ChatOpenAI, OpenAI\n",
|
||||
"from pydantic import BaseModel, Field"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -35,17 +35,17 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": 1,
|
||||
"id": "1594b2bf-2a6f-47bb-9a81-38930f8e606b",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"Joke(setup='Why did the chicken cross the road?', punchline='To get to the other side!')"
|
||||
"Joke(setup='Why did the tomato turn red?', punchline='Because it saw the salad dressing!')"
|
||||
]
|
||||
},
|
||||
"execution_count": 6,
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -53,8 +53,8 @@
|
||||
"source": [
|
||||
"from langchain_core.output_parsers import PydanticOutputParser\n",
|
||||
"from langchain_core.prompts import PromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field, validator\n",
|
||||
"from langchain_openai import OpenAI\n",
|
||||
"from pydantic import BaseModel, Field, model_validator\n",
|
||||
"\n",
|
||||
"model = OpenAI(model_name=\"gpt-3.5-turbo-instruct\", temperature=0.0)\n",
|
||||
"\n",
|
||||
@@ -65,11 +65,13 @@
|
||||
" punchline: str = Field(description=\"answer to resolve the joke\")\n",
|
||||
"\n",
|
||||
" # You can add custom validation logic easily with Pydantic.\n",
|
||||
" @validator(\"setup\")\n",
|
||||
" def question_ends_with_question_mark(cls, field):\n",
|
||||
" if field[-1] != \"?\":\n",
|
||||
" @model_validator(mode=\"before\")\n",
|
||||
" @classmethod\n",
|
||||
" def question_ends_with_question_mark(cls, values: dict) -> dict:\n",
|
||||
" setup = values[\"setup\"]\n",
|
||||
" if setup[-1] != \"?\":\n",
|
||||
" raise ValueError(\"Badly formed question!\")\n",
|
||||
" return field\n",
|
||||
" return values\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Set up a parser + inject instructions into the prompt template.\n",
|
||||
@@ -239,9 +241,9 @@
|
||||
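The hunk above migrates the example from Pydantic v1's `@validator` to Pydantic v2. A standalone sketch of the migrated model (a `@field_validator("setup")` would work equally well; this mirrors the notebook's `model_validator` choice):

```python
# Sketch of the Pydantic v2 validation pattern used after the migration: a
# model_validator in "before" mode inspects the raw values dict prior to parsing.
from pydantic import BaseModel, Field, model_validator


class Joke(BaseModel):
    setup: str = Field(description="question to set up a joke")
    punchline: str = Field(description="answer to resolve the joke")

    @model_validator(mode="before")
    @classmethod
    def question_ends_with_question_mark(cls, values: dict) -> dict:
        # Reject setups that do not end with a question mark.
        if values["setup"][-1] != "?":
            raise ValueError("Badly formed question!")
        return values
```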
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"display_name": "poetry-venv-311",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
"name": "poetry-venv-311"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
@@ -253,7 +255,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.1"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -70,8 +70,8 @@
|
||||
"source": [
|
||||
"from langchain.output_parsers import YamlOutputParser\n",
|
||||
"from langchain_core.prompts import PromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Define your desired data structure.\n",
|
||||
|
||||
@@ -269,7 +269,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class CitedAnswer(BaseModel):\n",
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
")\n",
|
||||
"from langchain_community.query_constructors.chroma import ChromaTranslator\n",
|
||||
"from langchain_community.query_constructors.elasticsearch import ElasticsearchTranslator\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel"
|
||||
"from pydantic import BaseModel"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -87,7 +87,7 @@
|
||||
"source": [
|
||||
"from typing import List, Optional\n",
|
||||
"\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"sub_queries_description = \"\"\"\\\n",
|
||||
"If the original question contains multiple distinct sub-questions, \\\n",
|
||||
|
||||
@@ -154,12 +154,12 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"execution_count": 2,
|
||||
"id": "0ae69afc",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field"
|
||||
"from pydantic import BaseModel, Field, model_validator"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -492,21 +492,24 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 47,
|
||||
"execution_count": 3,
|
||||
"id": "a2e8b434",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import validator\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Search(BaseModel):\n",
|
||||
" query: str\n",
|
||||
" author: str\n",
|
||||
"\n",
|
||||
" @validator(\"author\")\n",
|
||||
" def double(cls, v: str) -> str:\n",
|
||||
" return vectorstore.similarity_search(v, k=1)[0].page_content"
|
||||
" @model_validator(mode=\"before\")\n",
|
||||
" @classmethod\n",
|
||||
" def double(cls, values: dict) -> dict:\n",
|
||||
" author = values[\"author\"]\n",
|
||||
" closest_valid_author = vectorstore.similarity_search(author, k=1)[\n",
|
||||
" 0\n",
|
||||
" ].page_content\n",
|
||||
" values[\"author\"] = closest_valid_author\n",
|
||||
" return values"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -563,9 +566,9 @@
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"display_name": "poetry-venv-311",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
"name": "poetry-venv-311"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
@@ -577,7 +580,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.1"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -115,7 +115,7 @@
|
||||
"source": [
|
||||
"from typing import List, Optional\n",
|
||||
"\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Search(BaseModel):\n",
|
||||
|
||||
@@ -117,7 +117,7 @@
|
||||
"source": [
|
||||
"from typing import List, Optional\n",
|
||||
"\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Search(BaseModel):\n",
|
||||
|
||||
@@ -117,7 +117,7 @@
|
||||
"source": [
|
||||
"from typing import Optional\n",
|
||||
"\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Search(BaseModel):\n",
|
||||
|
||||
@@ -136,7 +136,7 @@
|
||||
"source": [
|
||||
"from langchain_core.output_parsers.openai_tools import PydanticToolsParser\n",
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Table(BaseModel):\n",
|
||||
|
||||
@@ -101,7 +101,7 @@
|
||||
"source": [
|
||||
"from typing import Optional\n",
|
||||
"\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Pydantic\n",
|
||||
@@ -667,7 +667,7 @@
|
||||
"\n",
|
||||
"from langchain_core.output_parsers import PydanticOutputParser\n",
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Person(BaseModel):\n",
|
||||
@@ -794,7 +794,7 @@
|
||||
"\n",
|
||||
"from langchain_core.messages import AIMessage\n",
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Person(BaseModel):\n",
|
||||
|
||||
@@ -112,7 +112,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class add(BaseModel):\n",
|
||||
@@ -321,7 +321,7 @@
|
||||
],
|
||||
"source": [
|
||||
"from langchain_core.output_parsers import PydanticToolsParser\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class add(BaseModel):\n",
|
||||
|
||||
@@ -418,8 +418,8 @@
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_core.tools import BaseTool\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class UpdateFavoritePetsSchema(BaseModel):\n",
|
||||
|
||||
@@ -140,7 +140,7 @@
|
||||
"source": [
|
||||
"from langchain_community.tools import WikipediaQueryRun\n",
|
||||
"from langchain_community.utilities import WikipediaAPIWrapper\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class WikiInputs(BaseModel):\n",
|
||||
|
||||
@@ -274,7 +274,7 @@
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class GetWeather(BaseModel):\n",
|
||||
|
||||
@@ -69,7 +69,7 @@
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel\n",
|
||||
"from pydantic import BaseModel\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Person(BaseModel):\n",
|
||||
|
||||
@@ -123,8 +123,8 @@
|
||||
"from dotenv import find_dotenv, load_dotenv\n",
|
||||
"from langchain_community.chat_models import ChatDeepInfra\n",
|
||||
"from langchain_core.messages import HumanMessage\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel\n",
|
||||
"from langchain_core.tools import tool\n",
|
||||
"from pydantic import BaseModel\n",
|
||||
"\n",
|
||||
"model_name = \"meta-llama/Meta-Llama-3-70B-Instruct\"\n",
|
||||
"\n",
|
||||
|
||||
@@ -264,7 +264,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"llm = ChatEdenAI(provider=\"openai\", temperature=0.2, max_tokens=500)\n",
|
||||
"\n",
|
||||
|
||||
@@ -483,7 +483,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class GetWeather(BaseModel):\n",
|
||||
|
||||
@@ -226,8 +226,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_core.tools import tool\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class WeatherInput(BaseModel):\n",
|
||||
@@ -343,8 +343,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel\n",
|
||||
"from langchain_core.utils.function_calling import convert_to_openai_tool\n",
|
||||
"from pydantic import BaseModel\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Joke(BaseModel):\n",
|
||||
|
||||
@@ -658,8 +658,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import Field\n",
|
||||
"from langchain_core.tools import tool\n",
|
||||
"from pydantic import Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"@tool\n",
|
||||
|
||||
@@ -426,8 +426,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_core.tools import tool\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Define the schema for function arguments\n",
|
||||
|
||||
@@ -72,7 +72,7 @@
|
||||
"source": [
|
||||
"from enum import Enum\n",
|
||||
"\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Operation(Enum):\n",
|
||||
@@ -135,8 +135,8 @@
|
||||
"source": [
|
||||
"from pprint import pprint\n",
|
||||
"\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel\n",
|
||||
"from langchain_core.utils.function_calling import convert_pydantic_to_openai_function\n",
|
||||
"from pydantic import BaseModel\n",
|
||||
"\n",
|
||||
"openai_function_def = convert_pydantic_to_openai_function(Calculator)\n",
|
||||
"pprint(openai_function_def)"
|
||||
|
||||
@@ -286,7 +286,7 @@ In order to pass tools and let the LLM choose the tool it needs to call, we need
|
||||
|
||||
```python
|
||||
from langchain_core.tools import tool
|
||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
# Define the schema for function arguments
|
||||
class OperationInput(BaseModel):
|
||||
|
||||
@@ -106,9 +106,9 @@
|
||||
"from langchain import hub\n",
|
||||
"from langchain.agents import AgentExecutor, create_openai_functions_agent\n",
|
||||
"from langchain_community.utilities.infobip import InfobipAPIWrapper\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_core.tools import StructuredTool\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"instructions = \"You are a coding teacher. You are teaching a student how to code. The student asks you a question. You answer the question.\"\n",
|
||||
"base_prompt = hub.pull(\"langchain-ai/openai-functions-template\")\n",
|
||||
|
||||
@@ -72,8 +72,8 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from pydantic import BaseModel, Field\n",
|
||||
"\n",
|
||||
"tagging_prompt = ChatPromptTemplate.from_template(\n",
|
||||
" \"\"\"\n",
|
||||
|
||||
@@ -62,7 +62,6 @@
|
||||
"# dotenv.load_dotenv()\n",
|
||||
"\n",
|
||||
"from langchain.prompts import FewShotPromptTemplate, PromptTemplate\n",
|
||||
"from langchain_core.pydantic_v1 import BaseModel\n",
|
||||
"from langchain_experimental.tabular_synthetic_data.openai import (\n",
|
||||
" OPENAI_TEMPLATE,\n",
|
||||
" create_openai_data_generator,\n",
|
||||
@@ -71,7 +70,8 @@
|
||||
" SYNTHETIC_FEW_SHOT_PREFIX,\n",
|
||||
" SYNTHETIC_FEW_SHOT_SUFFIX,\n",
|
||||
")\n",
|
||||
"from langchain_openai import ChatOpenAI"
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"from pydantic import BaseModel"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -115,7 +115,7 @@
"source": [
"from typing import Optional\n",
"\n",
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
"from pydantic import BaseModel, Field\n",
"\n",
"\n",
"class Person(BaseModel):\n",
@@ -167,7 +167,7 @@
"from typing import Optional\n",
"\n",
"from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
"from pydantic import BaseModel, Field\n",
"\n",
"# Define a custom prompt to provide instructions and any additional context.\n",
"# 1) You can add examples into the prompt template to improve extraction quality\n",
@@ -290,7 +290,7 @@
"source": [
"from typing import List, Optional\n",
"\n",
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
"from pydantic import BaseModel, Field\n",
"\n",
"\n",
"class Person(BaseModel):\n",
@@ -376,7 +376,7 @@
"source": [
"from typing import Optional\n",
"\n",
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
"from pydantic import BaseModel, Field\n",
"\n",
"\n",
"class Search(BaseModel):\n",
@@ -7,17 +7,24 @@ echo "VERCEL_GIT_COMMIT_REF: $VERCEL_GIT_COMMIT_REF"
if [ "$VERCEL_ENV" == "production" ] || [ "$VERCEL_GIT_COMMIT_REF" == "master" ] || [ "$VERCEL_GIT_COMMIT_REF" == "v0.1" ]; then
echo "✅ Production build - proceeding with build"
exit 1;
else
echo "Checking for changes in docs/"
fi

# TODO: remove this for v0.3 launch
# exit 0 if git commit ref is v0.3rc
echo "🛑 v0.3rc build is ignored currently"
exit 0;
# end TODO


echo "Checking for changes in docs/"
echo "---"
git log -n 50 --pretty=format:"%s" -- . | grep -v '(#'
if [ $? -eq 0 ]; then
echo "---"
git log -n 50 --pretty=format:"%s" -- . | grep -v '(#'
if [ $? -eq 0 ]; then
echo "---"
echo "✅ Changes detected in docs/ - proceeding with build"
exit 1
else
echo "---"
echo "🛑 No changes detected in docs/ - ignoring build"
exit 0
fi
echo "✅ Changes detected in docs/ - proceeding with build"
exit 1
else
echo "---"
echo "🛑 No changes detected in docs/ - ignoring build"
exit 0
fi
@@ -12,8 +12,8 @@ license = "MIT"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22__package_name_short__%3D%3D0%22&expanded=true"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
langchain-core = "^0.2.0"
python = ">=3.9,<4.0"
langchain-core = "^0.3.0.dev"

[tool.poetry.group.test]
optional = true
@@ -1,27 +0,0 @@
#!/bin/bash
#
# This script searches for lines starting with "import pydantic" or "from pydantic"
# in tracked files within a Git repository.
#
# Usage: ./scripts/check_pydantic.sh /path/to/repository

# Check if a path argument is provided
if [ $# -ne 1 ]; then
echo "Usage: $0 /path/to/repository"
exit 1
fi

repository_path="$1"

# Search for lines matching the pattern within the specified repository
result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic')

# Check if any matching lines were found
if [ -n "$result" ]; then
echo "ERROR: The following lines need to be updated:"
echo "$result"
echo "Please replace the code with an import from langchain_core.pydantic_v1."
echo "For example, replace 'from pydantic import BaseModel'"
echo "with 'from langchain_core.pydantic_v1 import BaseModel'"
exit 1
fi
@@ -6,8 +6,8 @@ authors = []
readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
langchain-core = ">=0.1.5,<0.3"
python = ">=3.9,<4.0"
langchain-core = "^0.3.0.dev"
langchain-openai = ">=0.0.1"
401  libs/cli/poetry.lock  generated
The regenerated lockfile updates pinned dependency versions: certifi 2024.7.4 → 2024.8.30, fastapi 0.112.1 → 0.114.0, httpx 0.27.0 → 0.27.2, idna 3.7 → 3.8, langchain-core 0.2.34 → 0.2.38, langserve 0.2.2 → 0.2.3, langsmith 0.1.101 → 0.1.115, pydantic 2.8.2 → 2.9.0, pydantic-core 2.20.1 → 2.23.2, rich 13.7.1 → 13.8.0, setuptools 73.0.1 → 74.1.2, starlette 0.38.2 → 0.38.4, and watchdog 4.0.2 → 5.0.2; tzdata is added, while importlib-resources, pkgutil-resolve-name, and the remaining python_version < "3.9" environment markers are dropped to match the new Python >=3.9 floor.
|
||||
{file = "watchdog-5.0.2.tar.gz", hash = "sha256:dcebf7e475001d2cdeb020be630dc5b687e9acdd60d16fea6bb4508e7b94cf76"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -1416,25 +1388,10 @@ files = [
[package.extras]
test = ["pytest (>=6.0.0)", "setuptools (>=65)"]

[[package]]
name = "zipp"
version = "3.20.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
    {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"},
    {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"},
]

[package.extras]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]

[extras]
serve = []

[metadata]
lock-version = "2.0"
python-versions = ">=3.8.1,<4.0"
content-hash = "f549b3468a0b27c75b171c3a4efd8df9c3b3ae737c7e097ffc3fb6fb0fe5f2ef"
python-versions = ">=3.9,<4.0"
content-hash = "d7ef8a78c84458975d2ff479af00f4bde06e77f25f8306c64aef5bdb34f34798"

@@ -12,7 +12,7 @@ license = "MIT"
|
||||
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-cli%3D%3D0%22&expanded=true"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.8.1,<4.0"
|
||||
python = ">=3.9,<4.0"
|
||||
typer = { extras = ["all"], version = "^0.9.0" }
|
||||
gitpython = "^3.1.40"
|
||||
langserve = { extras = ["all"], version = ">=0.0.51" }
|
||||
|
||||
@@ -22,7 +22,7 @@ integration_tests:
	poetry run pytest $(TEST_FILE)

test_watch:
	poetry run ptw --disable-socket --allow-unix-socket --snapshot-update --now . -- -vv -x tests/unit_tests
	poetry run ptw --disable-socket --allow-unix-socket --snapshot-update --now . -- -vv tests/unit_tests

check_imports: $(shell find langchain_community -name '*.py')
	poetry run python ./scripts/check_imports.py $^
@@ -45,7 +45,6 @@ lint_tests: PYTHON_FILES=tests
lint_tests: MYPY_CACHE=.mypy_cache_test

lint lint_diff lint_package lint_tests:
	./scripts/check_pydantic.sh .
	./scripts/lint_imports.sh .
	./scripts/check_pickle.sh .
	[ "$(PYTHON_FILES)" = "" ] || poetry run ruff check $(PYTHON_FILES)

@@ -34,7 +34,6 @@ hologres-vector==0.0.6
html2text>=2020.1.16
httpx>=0.24.1,<0.25
httpx-sse>=0.4.0,<0.5
javelin-sdk>=0.1.8,<0.2
jinja2>=3,<4
jq>=1.4.1,<2
jsonschema>1
@@ -60,6 +59,7 @@ pgvector>=0.1.6,<0.2
praw>=7.7.1,<8
premai>=0.3.25,<0.4
psychicapi>=0.8.0,<0.9
pydantic>=2.7.4,<3
py-trello>=0.19.0,<0.20
pyjwt>=2.8.0,<3
pymupdf>=1.22.3,<2

@@ -25,7 +25,7 @@ from langchain_core.messages import (
    SystemMessage,
    ToolMessage,
)
from langchain_core.pydantic_v1 import BaseModel
from pydantic import BaseModel
from typing_extensions import Literal

@@ -1,10 +1,10 @@
from __future__ import annotations

from typing import TYPE_CHECKING, List, Literal, Optional
from typing import TYPE_CHECKING, Any, List, Literal, Optional

from langchain_core.pydantic_v1 import root_validator
from langchain_core.tools import BaseTool
from langchain_core.tools.base import BaseToolkit
from pydantic import ConfigDict, model_validator

from langchain_community.tools.ainetwork.app import AINAppOps
from langchain_community.tools.ainetwork.owner import AINOwnerOps
@@ -36,8 +36,9 @@ class AINetworkToolkit(BaseToolkit):
    network: Optional[Literal["mainnet", "testnet"]] = "testnet"
    interface: Optional[Ain] = None

    @root_validator(pre=True)
    def set_interface(cls, values: dict) -> dict:
    @model_validator(mode="before")
    @classmethod
    def set_interface(cls, values: dict) -> Any:
        """Set the interface if not provided.

        If the interface is not provided, attempt to authenticate with the
@@ -53,9 +54,10 @@ class AINetworkToolkit(BaseToolkit):
            values["interface"] = authenticate(network=values.get("network", "testnet"))
        return values

    class Config:
        arbitrary_types_allowed = True
        validate_all = True
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        validate_default=True,
    )

    def get_tools(self) -> List[BaseTool]:
        """Get the tools in the toolkit."""

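The toolkit hunks in this part of the diff all apply the same Pydantic v1-to-v2 migration: imports move from `langchain_core.pydantic_v1` to `pydantic`, `@root_validator(pre=True)` becomes a `@model_validator(mode="before")` classmethod, and the nested `class Config` is replaced by a `model_config = ConfigDict(...)` attribute. A minimal, self-contained sketch of the pattern follows; `ExampleToolkit` and its fields are illustrative placeholders, not code taken from this diff.

```python
from typing import Any, Optional

from pydantic import BaseModel, ConfigDict, model_validator


class ExampleToolkit(BaseModel):
    """Hypothetical toolkit illustrating the Pydantic v2 idioms used in these hunks."""

    network: Optional[str] = "testnet"
    interface: Optional[object] = None

    # Pydantic v2 replacement for `class Config: arbitrary_types_allowed = True`.
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        validate_default=True,
    )

    # Pydantic v2 replacement for `@root_validator(pre=True)`.
    @model_validator(mode="before")
    @classmethod
    def set_interface(cls, values: dict) -> Any:
        # Fill in a default interface when the caller did not supply one.
        values.setdefault("interface", object())
        return values


if __name__ == "__main__":
    print(ExampleToolkit())  # interface is populated by the "before" validator
```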
@@ -3,9 +3,9 @@ from __future__ import annotations
from typing import TYPE_CHECKING, List, Optional

from langchain_core.language_models import BaseLanguageModel
from langchain_core.pydantic_v1 import Field
from langchain_core.tools import BaseTool
from langchain_core.tools.base import BaseToolkit
from pydantic import ConfigDict, Field

from langchain_community.tools.amadeus.closest_airport import AmadeusClosestAirport
from langchain_community.tools.amadeus.flight_search import AmadeusFlightSearch
@@ -26,8 +26,9 @@ class AmadeusToolkit(BaseToolkit):
    client: Client = Field(default_factory=authenticate)
    llm: Optional[BaseLanguageModel] = Field(default=None)

    class Config:
        arbitrary_types_allowed = True
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
    )

    def get_tools(self) -> List[BaseTool]:
        """Get the tools in the toolkit."""

@@ -2,9 +2,9 @@

from typing import List

from langchain_core.pydantic_v1 import Field
from langchain_core.tools import BaseTool
from langchain_core.tools.base import BaseToolkit
from pydantic import ConfigDict, Field

from langchain_community.tools.cassandra_database.tool import (
    GetSchemaCassandraDatabaseTool,
@@ -24,8 +24,9 @@ class CassandraDatabaseToolkit(BaseToolkit):

    db: CassandraDatabase = Field(exclude=True)

    class Config:
        arbitrary_types_allowed = True
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
    )

    def get_tools(self) -> List[BaseTool]:
        """Get the tools in the toolkit."""

@@ -1,8 +1,8 @@
|
||||
from typing import List
|
||||
from typing import Any, List
|
||||
|
||||
from langchain_core.pydantic_v1 import root_validator
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import model_validator
|
||||
|
||||
from langchain_community.tools.connery import ConneryService
|
||||
|
||||
@@ -23,8 +23,9 @@ class ConneryToolkit(BaseToolkit):
|
||||
"""
|
||||
return self.tools
|
||||
|
||||
@root_validator(pre=True)
|
||||
def validate_attributes(cls, values: dict) -> dict:
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def validate_attributes(cls, values: dict) -> Any:
|
||||
"""
|
||||
Validate the attributes of the ConneryToolkit class.
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Dict, List, Optional, Type
|
||||
from typing import Any, Dict, List, Optional, Type
|
||||
|
||||
from langchain_core.pydantic_v1 import root_validator
|
||||
from langchain_core.tools import BaseTool, BaseToolkit
|
||||
from langchain_core.utils.pydantic import get_fields
|
||||
from pydantic import model_validator
|
||||
|
||||
from langchain_community.tools.file_management.copy import CopyFileTool
|
||||
from langchain_community.tools.file_management.delete import DeleteFileTool
|
||||
@@ -63,8 +63,9 @@ class FileManagementToolkit(BaseToolkit):
|
||||
selected_tools: Optional[List[str]] = None
|
||||
"""If provided, only provide the selected tools. Defaults to all."""
|
||||
|
||||
@root_validator(pre=True)
|
||||
def validate_tools(cls, values: dict) -> dict:
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def validate_tools(cls, values: dict) -> Any:
|
||||
selected_tools = values.get("selected_tools") or []
|
||||
for tool_name in selected_tools:
|
||||
if tool_name not in _FILE_TOOLS_MAP:
|
||||
|
||||
@@ -2,9 +2,9 @@ from __future__ import annotations
|
||||
|
||||
from typing import List
|
||||
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import ConfigDict, Field
|
||||
|
||||
from langchain_community.tools.financial_datasets.balance_sheets import BalanceSheets
|
||||
from langchain_community.tools.financial_datasets.cash_flow_statements import (
|
||||
@@ -31,8 +31,9 @@ class FinancialDatasetsToolkit(BaseToolkit):
|
||||
super().__init__()
|
||||
self.api_wrapper = api_wrapper
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
)
|
||||
|
||||
def get_tools(self) -> List[BaseTool]:
|
||||
"""Get the tools in the toolkit."""
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
|
||||
from typing import Dict, List
|
||||
|
||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from langchain_community.tools.github.prompt import (
|
||||
COMMENT_ON_ISSUE_PROMPT,
|
||||
|
||||
@@ -2,9 +2,9 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, List
|
||||
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import ConfigDict, Field
|
||||
|
||||
from langchain_community.tools.gmail.create_draft import GmailCreateDraft
|
||||
from langchain_community.tools.gmail.get_message import GmailGetMessage
|
||||
@@ -117,8 +117,9 @@ class GmailToolkit(BaseToolkit):
|
||||
|
||||
api_resource: Resource = Field(default_factory=build_resource_service)
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
)
|
||||
|
||||
def get_tools(self) -> List[BaseTool]:
|
||||
"""Get the tools in the toolkit."""
|
||||
|
||||
@@ -6,6 +6,7 @@ from typing import List
|
||||
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import ConfigDict
|
||||
|
||||
from langchain_community.tools.multion.close_session import MultionCloseSession
|
||||
from langchain_community.tools.multion.create_session import MultionCreateSession
|
||||
@@ -25,8 +26,9 @@ class MultionToolkit(BaseToolkit):
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
)
|
||||
|
||||
def get_tools(self) -> List[BaseTool]:
|
||||
"""Get the tools in the toolkit."""
|
||||
|
||||
@@ -3,9 +3,9 @@ from __future__ import annotations
|
||||
from typing import Any, List, Optional, Sequence
|
||||
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.agent_toolkits.nla.tool import NLATool
|
||||
from langchain_community.tools.openapi.utils.openapi_utils import OpenAPISpec
|
||||
|
||||
@@ -2,9 +2,9 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, List
|
||||
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import ConfigDict, Field
|
||||
|
||||
from langchain_community.tools.office365.create_draft_message import (
|
||||
O365CreateDraftMessage,
|
||||
@@ -40,8 +40,9 @@ class O365Toolkit(BaseToolkit):
|
||||
|
||||
account: Account = Field(default_factory=authenticate)
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
)
|
||||
|
||||
def get_tools(self) -> List[BaseTool]:
|
||||
"""Get the tools in the toolkit."""
|
||||
|
||||
@@ -9,8 +9,8 @@ import yaml
|
||||
from langchain_core.callbacks import BaseCallbackManager
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts import BasePromptTemplate, PromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.tools import BaseTool, Tool
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.agent_toolkits.openapi.planner_prompt import (
|
||||
API_CONTROLLER_PROMPT,
|
||||
@@ -69,7 +69,7 @@ class RequestsGetToolWithParsing(BaseRequestsTool, BaseTool):
|
||||
|
||||
name: str = "requests_get"
|
||||
"""Tool name."""
|
||||
description = REQUESTS_GET_TOOL_DESCRIPTION
|
||||
description: str = REQUESTS_GET_TOOL_DESCRIPTION
|
||||
"""Tool description."""
|
||||
response_length: int = MAX_RESPONSE_LENGTH
|
||||
"""Maximum length of the response to be returned."""
|
||||
@@ -103,7 +103,7 @@ class RequestsPostToolWithParsing(BaseRequestsTool, BaseTool):
|
||||
|
||||
name: str = "requests_post"
|
||||
"""Tool name."""
|
||||
description = REQUESTS_POST_TOOL_DESCRIPTION
|
||||
description: str = REQUESTS_POST_TOOL_DESCRIPTION
|
||||
"""Tool description."""
|
||||
response_length: int = MAX_RESPONSE_LENGTH
|
||||
"""Maximum length of the response to be returned."""
|
||||
@@ -134,7 +134,7 @@ class RequestsPatchToolWithParsing(BaseRequestsTool, BaseTool):
|
||||
|
||||
name: str = "requests_patch"
|
||||
"""Tool name."""
|
||||
description = REQUESTS_PATCH_TOOL_DESCRIPTION
|
||||
description: str = REQUESTS_PATCH_TOOL_DESCRIPTION
|
||||
"""Tool description."""
|
||||
response_length: int = MAX_RESPONSE_LENGTH
|
||||
"""Maximum length of the response to be returned."""
|
||||
@@ -167,7 +167,7 @@ class RequestsPutToolWithParsing(BaseRequestsTool, BaseTool):
|
||||
|
||||
name: str = "requests_put"
|
||||
"""Tool name."""
|
||||
description = REQUESTS_PUT_TOOL_DESCRIPTION
|
||||
description: str = REQUESTS_PUT_TOOL_DESCRIPTION
|
||||
"""Tool description."""
|
||||
response_length: int = MAX_RESPONSE_LENGTH
|
||||
"""Maximum length of the response to be returned."""
|
||||
@@ -198,7 +198,7 @@ class RequestsDeleteToolWithParsing(BaseRequestsTool, BaseTool):
|
||||
|
||||
name: str = "requests_delete"
|
||||
"""The name of the tool."""
|
||||
description = REQUESTS_DELETE_TOOL_DESCRIPTION
|
||||
description: str = REQUESTS_DELETE_TOOL_DESCRIPTION
|
||||
"""The description of the tool."""
|
||||
|
||||
response_length: Optional[int] = MAX_RESPONSE_LENGTH
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, List, Optional, Type, cast
|
||||
from typing import TYPE_CHECKING, Any, List, Optional, Type, cast
|
||||
|
||||
from langchain_core.pydantic_v1 import root_validator
|
||||
from langchain_core.tools import BaseTool, BaseToolkit
|
||||
from pydantic import ConfigDict, model_validator
|
||||
|
||||
from langchain_community.tools.playwright.base import (
|
||||
BaseBrowserTool,
|
||||
@@ -68,12 +68,14 @@ class PlayWrightBrowserToolkit(BaseToolkit):
|
||||
sync_browser: Optional["SyncBrowser"] = None
|
||||
async_browser: Optional["AsyncBrowser"] = None
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
extra = "forbid"
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
extra="forbid",
|
||||
)
|
||||
|
||||
@root_validator(pre=True)
|
||||
def validate_imports_and_browser_provided(cls, values: dict) -> dict:
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def validate_imports_and_browser_provided(cls, values: dict) -> Any:
|
||||
"""Check that the arguments are valid."""
|
||||
lazy_import_playwright_browsers()
|
||||
if values.get("async_browser") is None and values.get("sync_browser") is None:
|
||||
|
||||
@@ -13,9 +13,9 @@ from langchain_core.prompts.chat import (
|
||||
HumanMessagePromptTemplate,
|
||||
SystemMessagePromptTemplate,
|
||||
)
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import ConfigDict, Field
|
||||
|
||||
from langchain_community.tools.powerbi.prompt import (
|
||||
QUESTION_TO_QUERY_BASE,
|
||||
@@ -63,8 +63,9 @@ class PowerBIToolkit(BaseToolkit):
|
||||
output_token_limit: Optional[int] = None
|
||||
tiktoken_model_name: Optional[str] = None
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
)
|
||||
|
||||
def get_tools(self) -> List[BaseTool]:
|
||||
"""Get the tools in the toolkit."""
|
||||
|
||||
@@ -2,9 +2,9 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, List
|
||||
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import ConfigDict, Field
|
||||
|
||||
from langchain_community.tools.slack.get_channel import SlackGetChannel
|
||||
from langchain_community.tools.slack.get_message import SlackGetMessage
|
||||
@@ -91,8 +91,9 @@ class SlackToolkit(BaseToolkit):
|
||||
|
||||
client: WebClient = Field(default_factory=login)
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
)
|
||||
|
||||
def get_tools(self) -> List[BaseTool]:
|
||||
"""Get the tools in the toolkit."""
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
from typing import List
|
||||
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import ConfigDict, Field
|
||||
|
||||
from langchain_community.tools.spark_sql.tool import (
|
||||
InfoSparkSQLTool,
|
||||
@@ -27,8 +27,9 @@ class SparkSQLToolkit(BaseToolkit):
|
||||
db: SparkSQL = Field(exclude=True)
|
||||
llm: BaseLanguageModel = Field(exclude=True)
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
)
|
||||
|
||||
def get_tools(self) -> List[BaseTool]:
|
||||
"""Get the tools in the toolkit."""
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
from typing import List
|
||||
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.tools.base import BaseToolkit
|
||||
from pydantic import ConfigDict, Field
|
||||
|
||||
from langchain_community.tools.sql_database.tool import (
|
||||
InfoSQLDatabaseTool,
|
||||
@@ -83,8 +83,9 @@ class SQLDatabaseToolkit(BaseToolkit):
|
||||
"""Return string representation of SQL dialect to use."""
|
||||
return self.db.dialect
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
)
|
||||
|
||||
def get_tools(self) -> List[BaseTool]:
|
||||
"""Get the tools in the toolkit."""
|
||||
|
||||
@@ -15,10 +15,11 @@ from langchain.agents.openai_assistant.base import OpenAIAssistantRunnable, Outp
|
||||
from langchain_core._api import beta
|
||||
from langchain_core.callbacks import CallbackManager
|
||||
from langchain_core.load import dumpd
|
||||
from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
|
||||
from langchain_core.runnables import RunnableConfig, ensure_config
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.utils.function_calling import convert_to_openai_tool
|
||||
from pydantic import BaseModel, Field, model_validator
|
||||
from typing_extensions import Self
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import openai
|
||||
@@ -209,14 +210,14 @@ class OpenAIAssistantV2Runnable(OpenAIAssistantRunnable):
|
||||
as_agent: bool = False
|
||||
"""Use as a LangChain agent, compatible with the AgentExecutor."""
|
||||
|
||||
@root_validator(pre=False, skip_on_failure=True)
|
||||
def validate_async_client(cls, values: dict) -> dict:
|
||||
if values["async_client"] is None:
|
||||
@model_validator(mode="after")
|
||||
def validate_async_client(self) -> Self:
|
||||
if self.async_client is None:
|
||||
import openai
|
||||
|
||||
api_key = values["client"].api_key
|
||||
values["async_client"] = openai.AsyncOpenAI(api_key=api_key)
|
||||
return values
|
||||
api_key = self.client.api_key
|
||||
self.async_client = openai.AsyncOpenAI(api_key=api_key)
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def create_assistant(
|
||||
|
||||
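The `OpenAIAssistantV2Runnable` hunk above shows the other half of the validator migration: `@root_validator(pre=False, skip_on_failure=True)` becomes an after-mode validator that receives the constructed instance and returns it. A minimal sketch under the same caveat (the `ExampleRunnable` class is a placeholder, not the actual implementation):

```python
from typing import Any, Optional

from pydantic import BaseModel, model_validator
from typing_extensions import Self


class ExampleRunnable(BaseModel):
    """Hypothetical model showing the 'after' validator used in the hunk above."""

    client: Any = None
    async_client: Optional[Any] = None

    @model_validator(mode="after")
    def validate_async_client(self) -> Self:
        # Runs after field validation; may read other fields and fill derived ones.
        if self.async_client is None:
            self.async_client = f"async wrapper around {self.client!r}"
        return self


if __name__ == "__main__":
    print(ExampleRunnable(client="sync-client").async_client)
```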
@@ -22,9 +22,9 @@ from langchain_core.output_parsers import (
|
||||
BaseOutputParser,
|
||||
)
|
||||
from langchain_core.prompts import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import BaseModel
|
||||
from langchain_core.runnables import Runnable
|
||||
from langchain_core.utils.pydantic import is_basemodel_subclass
|
||||
from pydantic import BaseModel
|
||||
|
||||
from langchain_community.output_parsers.ernie_functions import (
|
||||
JsonOutputFunctionsParser,
|
||||
@@ -243,7 +243,7 @@ def create_ernie_fn_runnable(
|
||||
from langchain.chains.ernie_functions import create_ernie_fn_chain
|
||||
from langchain_community.chat_models import ErnieBotChat
|
||||
from langchain_core.prompts import ChatPromptTemplate
|
||||
from langchain.pydantic_v1 import BaseModel, Field
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class RecordPerson(BaseModel):
|
||||
@@ -317,7 +317,7 @@ def create_structured_output_runnable(
|
||||
from langchain.chains.ernie_functions import create_structured_output_chain
|
||||
from langchain_community.chat_models import ErnieBotChat
|
||||
from langchain_core.prompts import ChatPromptTemplate
|
||||
from langchain.pydantic_v1 import BaseModel, Field
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
class Dog(BaseModel):
|
||||
\"\"\"Identifying information about a dog.\"\"\"
|
||||
@@ -415,7 +415,7 @@ def create_ernie_fn_chain(
|
||||
from langchain_community.chat_models import ErnieBotChat
|
||||
from langchain_core.prompts import ChatPromptTemplate
|
||||
|
||||
from langchain.pydantic_v1 import BaseModel, Field
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class RecordPerson(BaseModel):
|
||||
@@ -502,7 +502,7 @@ def create_structured_output_chain(
|
||||
from langchain_community.chat_models import ErnieBotChat
|
||||
from langchain_core.prompts import ChatPromptTemplate
|
||||
|
||||
from langchain.pydantic_v1 import BaseModel, Field
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
class Dog(BaseModel):
|
||||
\"\"\"Identifying information about a dog.\"\"\"
|
||||
|
||||
@@ -10,7 +10,7 @@ from langchain.chains.llm import LLMChain
|
||||
from langchain_core.callbacks import CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
AQL_FIX_PROMPT,
|
||||
@@ -57,6 +57,37 @@ class ArangoGraphQAChain(Chain):
|
||||
# Specify the maximum amount of AQL Generation attempts that should be made
|
||||
max_aql_generation_attempts: int = 3
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
return [self.input_key]
|
||||
|
||||
@@ -9,7 +9,7 @@ from langchain.chains.llm import LLMChain
|
||||
from langchain_core.callbacks.manager import CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
ENTITY_EXTRACTION_PROMPT,
|
||||
|
||||
@@ -22,8 +22,8 @@ from langchain_core.prompts import (
|
||||
HumanMessagePromptTemplate,
|
||||
MessagesPlaceholder,
|
||||
)
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from langchain_core.runnables import Runnable
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.cypher_utils import (
|
||||
CypherQueryCorrector,
|
||||
@@ -180,6 +180,36 @@ class GraphCypherQAChain(Chain):
|
||||
"""Optional cypher validation tool"""
|
||||
use_function_response: bool = False
|
||||
"""Whether to wrap the database context as tool/function response"""
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
|
||||
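The graph QA chain hunks in this range all add the same `allow_dangerous_requests` opt-in: the constructor refuses to build the chain unless the caller explicitly accepts the risk. A minimal sketch of the guard, using a placeholder class rather than the real chains:

```python
from typing import Any


class ExampleGraphQAChain:
    """Hypothetical stand-in showing the opt-in guard added in these hunks."""

    def __init__(self, **kwargs: Any) -> None:
        self.allow_dangerous_requests = kwargs.get("allow_dangerous_requests", False)
        if self.allow_dangerous_requests is not True:
            raise ValueError(
                "In order to use this chain, you must acknowledge that it can make "
                "dangerous requests by setting `allow_dangerous_requests` to `True`."
            )


if __name__ == "__main__":
    try:
        ExampleGraphQAChain()  # raises: the flag was not set
    except ValueError as err:
        print(f"refused: {err}")
    chain = ExampleGraphQAChain(allow_dangerous_requests=True)  # explicit opt-in
    print("constructed with explicit opt-in")
```

With the real chains the flag is passed the same way through the chain constructor (e.g. `GraphCypherQAChain.from_llm(llm, graph=graph, allow_dangerous_requests=True)`, assuming the usual `from_llm` factory), together with narrowly scoped database credentials.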
@@ -10,7 +10,7 @@ from langchain.chains.llm import LLMChain
|
||||
from langchain_core.callbacks import CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
CYPHER_GENERATION_PROMPT,
|
||||
@@ -66,6 +66,37 @@ class FalkorDBQAChain(Chain):
|
||||
return_direct: bool = False
|
||||
"""Whether or not to return the result of querying the graph directly."""
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
"""Return the input keys.
|
||||
|
||||
@@ -10,7 +10,7 @@ from langchain_core.callbacks.manager import CallbackManager, CallbackManagerFor
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts import BasePromptTemplate
|
||||
from langchain_core.prompts.prompt import PromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
CYPHER_QA_PROMPT,
|
||||
@@ -63,6 +63,37 @@ class GremlinQAChain(Chain):
|
||||
return_direct: bool = False
|
||||
return_intermediate_steps: bool = False
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
"""Input keys.
|
||||
|
||||
@@ -9,7 +9,7 @@ from langchain.chains.llm import LLMChain
|
||||
from langchain_core.callbacks import CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
CYPHER_QA_PROMPT,
|
||||
@@ -39,6 +39,37 @@ class HugeGraphQAChain(Chain):
|
||||
input_key: str = "query" #: :meta private:
|
||||
output_key: str = "result" #: :meta private:
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
"""Input keys.
|
||||
|
||||
@@ -10,7 +10,7 @@ from langchain.chains.llm import LLMChain
|
||||
from langchain_core.callbacks import CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
CYPHER_QA_PROMPT,
|
||||
@@ -73,6 +73,37 @@ class KuzuQAChain(Chain):
|
||||
input_key: str = "query" #: :meta private:
|
||||
output_key: str = "result" #: :meta private:
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
"""Return the input keys.
|
||||
|
||||
@@ -9,7 +9,7 @@ from langchain.chains.llm import LLMChain
|
||||
from langchain_core.callbacks import CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
CYPHER_QA_PROMPT,
|
||||
@@ -39,6 +39,37 @@ class NebulaGraphQAChain(Chain):
|
||||
input_key: str = "query" #: :meta private:
|
||||
output_key: str = "result" #: :meta private:
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
"""Return the input keys.
|
||||
|
||||
@@ -9,7 +9,7 @@ from langchain.chains.prompt_selector import ConditionalPromptSelector
|
||||
from langchain_core.callbacks import CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts.base import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
CYPHER_QA_PROMPT,
|
||||
@@ -120,6 +120,37 @@ class NeptuneOpenCypherQAChain(Chain):
|
||||
extra_instructions: Optional[str] = None
|
||||
"""Extra instructions by the appended to the query generation prompt."""
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
"""Return the input keys.
|
||||
|
||||
@@ -12,7 +12,7 @@ from langchain_core.callbacks.manager import CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts.base import BasePromptTemplate
|
||||
from langchain_core.prompts.prompt import PromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import SPARQL_QA_PROMPT
|
||||
from langchain_community.graphs import NeptuneRdfGraph
|
||||
@@ -113,6 +113,37 @@ class NeptuneSparqlQAChain(Chain):
|
||||
extra_instructions: Optional[str] = None
|
||||
"""Extra instructions by the appended to the query generation prompt."""
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
return [self.input_key]
|
||||
|
||||
@@ -12,7 +12,7 @@ from langchain.chains.llm import LLMChain
|
||||
from langchain_core.callbacks.manager import CallbackManager, CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts.base import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
GRAPHDB_QA_PROMPT,
|
||||
@@ -46,6 +46,37 @@ class OntotextGraphDBQAChain(Chain):
|
||||
input_key: str = "query" #: :meta private:
|
||||
output_key: str = "result" #: :meta private:
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
return [self.input_key]
|
||||
|
||||
@@ -11,7 +11,7 @@ from langchain.chains.llm import LLMChain
|
||||
from langchain_core.callbacks import CallbackManagerForChainRun
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.prompts.base import BasePromptTemplate
|
||||
from langchain_core.pydantic_v1 import Field
|
||||
from pydantic import Field
|
||||
|
||||
from langchain_community.chains.graph_qa.prompts import (
|
||||
SPARQL_GENERATION_SELECT_PROMPT,
|
||||
@@ -47,6 +47,37 @@ class GraphSparqlQAChain(Chain):
|
||||
output_key: str = "result" #: :meta private:
|
||||
sparql_query_key: str = "sparql_query" #: :meta private:
|
||||
|
||||
allow_dangerous_requests: bool = False
|
||||
"""Forced user opt-in to acknowledge that the chain can make dangerous requests.
|
||||
|
||||
*Security note*: Make sure that the database connection uses credentials
|
||||
that are narrowly-scoped to only include necessary permissions.
|
||||
Failure to do so may result in data corruption or loss, since the calling
|
||||
code may attempt commands that would result in deletion, mutation
|
||||
of data if appropriately prompted or reading sensitive data if such
|
||||
data is present in the database.
|
||||
The best way to guard against such negative outcomes is to (as appropriate)
|
||||
limit the permissions granted to the credentials used with this tool.
|
||||
|
||||
See https://python.langchain.com/docs/security for more information.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Initialize the chain."""
|
||||
super().__init__(**kwargs)
|
||||
if self.allow_dangerous_requests is not True:
|
||||
raise ValueError(
|
||||
"In order to use this chain, you must acknowledge that it can make "
|
||||
"dangerous requests by setting `allow_dangerous_requests` to `True`."
|
||||
"You must narrowly scope the permissions of the database connection "
|
||||
"to only include necessary permissions. Failure to do so may result "
|
||||
"in data corruption or loss or reading sensitive data if such data is "
|
||||
"present in the database."
|
||||
"Only use this chain if you understand the risks and have taken the "
|
||||
"necessary precautions. "
|
||||
"See https://python.langchain.com/docs/security for more information."
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
"""Return the input keys.
|
||||
|
||||
@@ -7,7 +7,7 @@ from typing import Any, Dict, List, Optional
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain_core.callbacks import CallbackManagerForChainRun
|
||||
from langchain_core.pydantic_v1 import Field, root_validator
|
||||
from pydantic import ConfigDict, Field, model_validator
|
||||
|
||||
from langchain_community.utilities.requests import TextRequestsWrapper
|
||||
|
||||
@@ -38,9 +38,10 @@ class LLMRequestsChain(Chain):
|
||||
input_key: str = "url" #: :meta private:
|
||||
output_key: str = "output" #: :meta private:
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
extra = "forbid"
|
||||
model_config = ConfigDict(
|
||||
arbitrary_types_allowed=True,
|
||||
extra="forbid",
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
@@ -58,8 +59,9 @@ class LLMRequestsChain(Chain):
|
||||
"""
|
||||
return [self.output_key]
|
||||
|
||||
@root_validator(pre=True)
|
||||
def validate_environment(cls, values: Dict) -> Dict:
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def validate_environment(cls, values: Dict) -> Any:
|
||||
"""Validate that api key and python package exists in environment."""
|
||||
try:
|
||||
from bs4 import BeautifulSoup # noqa: F401
|
||||
|
||||
@@ -11,7 +11,7 @@ from langchain.chains.base import Chain
|
||||
from langchain.chains.llm import LLMChain
|
||||
from langchain_core.callbacks import CallbackManagerForChainRun, Callbacks
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||
from pydantic import BaseModel, Field
|
||||
from requests import Response
|
||||
|
||||
from langchain_community.tools.openapi.utils.api_models import APIOperation
|
||||
|
||||
@@ -17,8 +17,8 @@ from langchain_core.callbacks import (
|
||||
)
|
||||
from langchain_core.documents import Document
|
||||
from langchain_core.language_models import BaseLanguageModel
|
||||
from langchain_core.pydantic_v1 import Field, validator
|
||||
from langchain_core.vectorstores import VectorStoreRetriever
|
||||
from pydantic import ConfigDict, Field, validator
|
||||
|
||||
from langchain_community.chains.pebblo_retrieval.enforcement_filters import (
|
||||
SUPPORTED_VECTORSTORES,
|
||||
@@ -189,10 +189,11 @@ class PebbloRetrievalQA(Chain):
|
||||
else:
|
||||
return {self.output_key: answer}
|
||||
|
||||
class Config:
|
||||
allow_population_by_field_name = True
|
||||
arbitrary_types_allowed = True
|
||||
extra = "forbid"
|
||||
model_config = ConfigDict(
|
||||
populate_by_name=True,
|
||||
arbitrary_types_allowed=True,
|
||||
extra="forbid",
|
||||
)
|
||||
|
||||
@property
|
||||
def input_keys(self) -> List[str]:
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from typing import Any, List, Optional, Union
|
||||
|
||||
from langchain_core.pydantic_v1 import BaseModel
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class AuthContext(BaseModel):
|
||||
|
||||
@@ -10,9 +10,9 @@ import aiohttp
|
||||
from aiohttp import ClientTimeout
|
||||
from langchain_core.documents import Document
|
||||
from langchain_core.env import get_runtime_environment
|
||||
from langchain_core.pydantic_v1 import BaseModel
|
||||
from langchain_core.utils import get_from_dict_or_env
|
||||
from langchain_core.vectorstores import VectorStoreRetriever
|
||||
from pydantic import BaseModel
|
||||
from requests import Response, request
|
||||
from requests.exceptions import RequestException
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.