mirror of https://github.com/hwchase17/langchain.git
synced 2026-01-21 21:56:38 +00:00

Compare commits
85 Commits
eugene/foo...eugene/qa_
| Author | SHA1 | Date |
|---|---|---|
| | 4cfc22d679 | |
| | f274656ae8 | |
| | a47b332841 | |
| | 0f07cf61da | |
| | d158401e73 | |
| | de58942618 | |
| | df38d5250f | |
| | b246052184 | |
| | 52729ac0be | |
| | f62d454f36 | |
| | 6fe2536c5a | |
| | 418b170f94 | |
| | c3b3f46cb8 | |
| | e2245fac82 | |
| | 1a8e9023de | |
| | 1a62f9850f | |
| | 6ed50e78c9 | |
| | 5ced41bf50 | |
| | c6bdd6f482 | |
| | 3a99467ccb | |
| | 2ef4c9466f | |
| | 194adc485c | |
| | 97b05d70e6 | |
| | e1d113ea84 | |
| | 7c05f71e0f | |
| | 145a49cca2 | |
| | 5fc44989bf | |
| | f4a65236ee | |
| | 06cde06a20 | |
| | 3e51fdc840 | |
| | 0a177ec2cc | |
| | 6758894af1 | |
| | 6ba3c715b7 | |
| | d8952b8e8c | |
| | 31f61d4d7d | |
| | 99abd254fb | |
| | 3bcd641bc1 | |
| | 0bd98c99b3 | |
| | 8a2f2fc30b | |
| | 724a53711b | |
| | c6a78132d6 | |
| | a319a0ff1d | |
| | 63c3cc1f1f | |
| | 0154c586d3 | |
| | c2588b334f | |
| | 8b985a42e9 | |
| | 5b4206acd8 | |
| | 0592c29e9b | |
| | 88891477eb | |
| | 88bc15d69b | |
| | 1ab181f514 | |
| | ee4e11379f | |
| | bd42344b0a | |
| | 9f5960a0aa | |
| | 135afdf4fb | |
| | 4131be63af | |
| | f66b7ba32d | |
| | 9c6aa3f0b7 | |
| | 2240ca2979 | |
| | 77ccb4b1cf | |
| | b47f4cfe51 | |
| | 779a008d4e | |
| | 4e6620ecdd | |
| | 543a80569c | |
| | 9c88037dbc | |
| | a2bfa41216 | |
| | 8abc7ff55a | |
| | 6abb23ca97 | |
| | 900115a568 | |
| | 17b397ef93 | |
| | ca304ae046 | |
| | 537f6924dc | |
| | 995dfc6b05 | |
| | 832bc834b1 | |
| | 6997731729 | |
| | 64bfe1ff23 | |
| | 58c7414e10 | |
| | 125c9896a8 | |
| | f7ae12fa1f | |
| | d1462badaf | |
| | 9b30bdceb6 | |
| | 3125a89198 | |
| | 44791ce131 | |
| | fa8e0d90de | |
| | 222caaebdd | |
122 .github/DISCUSSION_TEMPLATE/q-a.yml (vendored)

@@ -1,122 +0,0 @@
labels: [Question]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for your interest in LangChain 🦜️🔗!

        Please follow these instructions, fill every question, and do every step. 🙏

        We're asking for this because answering questions and solving problems in GitHub takes a lot of time --
        this is time that we cannot spend on adding new features, fixing bugs, writing documentation or reviewing pull requests.

        By asking questions in a structured way (following this) it will be much easier for us to help you.

        There's a high chance that by following this process, you'll find the solution on your own, eliminating the need to submit a question and wait for an answer. 😎

        As there are many questions submitted every day, we will **DISCARD** and close the incomplete ones.

        That will allow us (and others) to focus on helping people like you that follow the whole process. 🤓

        Relevant links to check before opening a question to see if your question has already been answered, fixed or
        if there's another way to solve your problem:

        [LangChain documentation with the integrated search](https://python.langchain.com/docs/get_started/introduction),
        [API Reference](https://api.python.langchain.com/en/stable/),
        [GitHub search](https://github.com/langchain-ai/langchain),
        [LangChain Github Discussions](https://github.com/langchain-ai/langchain/discussions),
        [LangChain Github Issues](https://github.com/langchain-ai/langchain/issues?q=is%3Aissue),
        [LangChain ChatBot](https://chat.langchain.com/)
  - type: checkboxes
    id: checks
    attributes:
      label: Checked other resources
      description: Please confirm and check all the following options.
      options:
        - label: I added a very descriptive title to this question.
          required: true
        - label: I searched the LangChain documentation with the integrated search.
          required: true
        - label: I used the GitHub search to find a similar question and didn't find it.
          required: true
  - type: checkboxes
    id: help
    attributes:
      label: Commit to Help
      description: |
        After submitting this, I commit to one of:

        * Read open questions until I find 2 where I can help someone and add a comment to help there.
        * I already hit the "watch" button in this repository to receive notifications and I commit to help at least 2 people that ask questions in the future.
        * Once my question is answered, I will mark the answer as "accepted".
      options:
        - label: I commit to help with one of those options 👆
          required: true
  - type: textarea
    id: example
    attributes:
      label: Example Code
      description: |
        Please add a self-contained, [minimal, reproducible, example](https://stackoverflow.com/help/minimal-reproducible-example) with your use case.

        If a maintainer can copy it, run it, and see it right away, there's a much higher chance that you'll be able to get help.

        **Important!**

        * Use code tags (e.g., ```python ... ```) to correctly [format your code](https://help.github.com/en/github/writing-on-github/creating-and-highlighting-code-blocks#syntax-highlighting).
        * INCLUDE the language label (e.g. `python`) after the first three backticks to enable syntax highlighting. (e.g., ```python rather than ```).
        * Reduce your code to the minimum required to reproduce the issue if possible. This makes it much easier for others to help you.
        * Avoid screenshots when possible, as they are hard to read and (more importantly) don't allow others to copy-and-paste your code.
      placeholder: |
        from langchain_core.runnables import RunnableLambda

        def bad_code(inputs) -> int:
            raise NotImplementedError('For demo purpose')

        chain = RunnableLambda(bad_code)
        chain.invoke('Hello!')
      render: python
    validations:
      required: true
  - type: textarea
    id: description
    attributes:
      label: Description
      description: |
        What is the problem, question, or error?

        Write a short description explaining what you are doing, what you expect to happen, and what is currently happening.
      placeholder: |
        * I'm trying to use the `langchain` library to do X.
        * I expect to see Y.
        * Instead, it does Z.
    validations:
      required: true
  - type: textarea
    id: system-info
    attributes:
      label: System Info
      description: |
        Please share your system info with us.

        "pip freeze | grep langchain"
        platform (windows / linux / mac)
        python version

        OR if you're on a recent version of langchain-core you can paste the output of:

        python -m langchain_core.sys_info
      placeholder: |
        "pip freeze | grep langchain"
        platform
        python version

        Alternatively, if you're on a recent version of langchain-core you can paste the output of:

        python -m langchain_core.sys_info

        These will only surface LangChain packages, don't forget to include any other relevant
        packages you're using (if you're not sure what's relevant, you can paste the entire output of `pip freeze`).
    validations:
      required: true
38 .github/DISCUSSION_TEMPLATE/qa.yml (vendored, new file)

@@ -0,0 +1,38 @@
labels: [idea]
body:
  - type: checkboxes
    id: checks
    attributes:
      label: Checked
      description: Please confirm and check all the following options.
      options:
        - label: I searched existing ideas and did not find a similar one
          required: true
        - label: I added a very descriptive title
          required: true
        - label: I've clearly described the feature request and motivation for it
          required: true
  - type: textarea
    id: feature-request
    validations:
      required: true
    attributes:
      label: Feature request
      description: |
        A clear and concise description of the feature proposal. Please provide links to any relevant GitHub repos, papers, or other resources if relevant.
  - type: textarea
    id: motivation
    validations:
      required: true
    attributes:
      label: Motivation
      description: |
        Please outline the motivation for the proposal. Is your feature request related to a problem? e.g., I'm always frustrated when [...]. If this is related to another GitHub issue, please link here too.
  - type: textarea
    id: proposal
    validations:
      required: false
    attributes:
      label: Proposal (If applicable)
      description: |
        If you would like to propose a solution, please describe it here.
26 .github/ISSUE_TEMPLATE/bug-report.yml (vendored)

@@ -96,25 +96,21 @@ body:
    attributes:
      label: System Info
      description: |
        Please share your system info with us.
        Please share your system info with us. Do NOT skip this step and please don't trim
        the output. Most users don't include enough information here and it makes it harder
        for us to help you.

        "pip freeze | grep langchain"
        platform (windows / linux / mac)
        python version

        OR if you're on a recent version of langchain-core you can paste the output of:
        Run the following command in your terminal and paste the output here:

        python -m langchain_core.sys_info

        or if you have an existing python interpreter running:

        from langchain_core import sys_info
        sys_info.print_sys_info()

        alternatively, put the entire output of `pip freeze` here.
      placeholder: |
        "pip freeze | grep langchain"
        platform
        python version

        Alternatively, if you're on a recent version of langchain-core you can paste the output of:

        python -m langchain_core.sys_info

        These will only surface LangChain packages, don't forget to include any other relevant
        packages you're using (if you're not sure what's relevant, you can paste the entire output of `pip freeze`).
    validations:
      required: true
15 .github/scripts/get_min_versions.py (vendored)

@@ -19,10 +19,16 @@ MIN_VERSION_LIBS = [
    "langchain",
    "langchain-text-splitters",
    "SQLAlchemy",
    "pydantic",
]

SKIP_IF_PULL_REQUEST = ["langchain-core"]
# some libs only get checked on release because of simultaneous changes in
# multiple libs
SKIP_IF_PULL_REQUEST = [
    "langchain-core",
    "langchain-text-splitters",
    "langchain",
    "langchain-community",
]


def get_min_version(version: str) -> str:

@@ -68,10 +74,10 @@ def get_min_version_from_toml(
    min_versions = {}

    # Iterate over the libs in MIN_VERSION_LIBS
    for lib in MIN_VERSION_LIBS:
    for lib in set(MIN_VERSION_LIBS + (include or [])):
        if versions_for == "pull_request" and lib in SKIP_IF_PULL_REQUEST:
            # some libs only get checked on release because of simultaneous
            # changes
            # changes in multiple libs
            continue
        # Check if the lib is present in the dependencies
        if lib in dependencies:

@@ -89,7 +95,6 @@ def get_min_version_from_toml(
                if check_python_version(python_version, vs["python"])
            ][0]["version"]

            # Use parse_version to get the minimum supported version from version_string
            min_version = get_min_version(version_string)
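For context, this script's job is to turn each dependency's version constraint into the oldest release the constraint admits, so CI can install and test against minimum versions. The following is a hypothetical sketch of that extraction (not the repository's actual `get_min_version`), assuming plain PEP 440 constraints; Poetry-style `^`/`~` operators would need extra handling:

```python
# Hypothetical sketch, not the repository's implementation.
from packaging.specifiers import SpecifierSet


def lower_bound(constraint: str) -> str:
    """Return the smallest version explicitly allowed by a PEP 440 constraint."""
    for spec in SpecifierSet(constraint):
        if spec.operator in (">=", "=="):
            return spec.version
    raise ValueError(f"no lower bound in {constraint!r}")


print(lower_bound(">=0.2.8,<0.3"))  # -> 0.2.8
```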
5 .github/workflows/_release.yml (vendored)

@@ -85,7 +85,7 @@ jobs:
          path: langchain
          sparse-checkout: | # this only grabs files for relevant dir
            ${{ inputs.working-directory }}
          ref: master # this scopes to just master branch
          ref: ${{ github.ref }} # this scopes to just ref'd branch
          fetch-depth: 0 # this fetches entire commit history
      - name: Check Tags
        id: check-tags

@@ -232,7 +232,8 @@ jobs:
        id: min-version
        run: |
          poetry run pip install packaging
          min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release ${{ steps.setup-python.outputs.installed-python-version }})"
          python_version="$(poetry run python --version | awk '{print $2}')"
          min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release $python_version)"
          echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
          echo "min-versions=$min_versions"
3 .github/workflows/_test.yml (vendored)

@@ -48,7 +48,6 @@
        shell: bash
        run: |
          poetry run pip install packaging tomli
          echo "Python version ${{ steps.setup-python.outputs.installed-python-version }}"
          python_version="$(poetry run python --version | awk '{print $2}')"
          min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml pull_request $python_version)"
          echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"

@@ -59,7 +58,7 @@
        env:
          MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
        run: |
          poetry run pip install --force-reinstall $MIN_VERSIONS --editable .
          poetry run pip install $MIN_VERSIONS
          make tests
        working-directory: ${{ inputs.working-directory }}
@@ -39,7 +39,7 @@ conda install langchain -c conda-forge
For these applications, LangChain simplifies the entire application lifecycle:

- **Open-source libraries**: Build your applications using LangChain's open-source [building blocks](https://python.langchain.com/v0.2/docs/concepts#langchain-expression-language-lcel), [components](https://python.langchain.com/v0.2/docs/concepts), and [third-party integrations](https://python.langchain.com/v0.2/docs/integrations/platforms/).
Use [LangGraph](/docs/concepts/#langgraph) to build stateful agents with first-class streaming and human-in-the-loop support.
Use [LangGraph](https://langchain-ai.github.io/langgraph/) to build stateful agents with first-class streaming and human-in-the-loop support.
- **Productionization**: Inspect, monitor, and evaluate your apps with [LangSmith](https://docs.smith.langchain.com/) so that you can constantly optimize and deploy with confidence.
- **Deployment**: Turn your LangGraph applications into production-ready APIs and Assistants with [LangGraph Cloud](https://langchain-ai.github.io/langgraph/cloud/).
@@ -15,7 +15,7 @@
:member-order: groupwise
:show-inheritance: True
:special-members: __call__
:exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, astream_log, transform, atransform, get_output_schema, get_prompts, config_schema, map, pick, pipe, with_listeners, with_alisteners, with_config, with_fallbacks, with_types, with_retry, InputType, OutputType, config_specs, output_schema, get_input_schema, get_graph, get_name, input_schema, name, bind, assign, as_tool, get_config_jsonschema, get_input_jsonschema, get_output_jsonschema, model_construct, model_copy, model_dump, model_dump_json, model_parametrized_name, model_post_init, model_rebuild, model_validate, model_validate_json, model_validate_strings, to_json, model_extra, model_fields_set, model_json_schema
:exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, astream_log, transform, atransform, get_output_schema, get_prompts, config_schema, map, pick, pipe, InputType, OutputType, config_specs, output_schema, get_input_schema, get_graph, get_name, input_schema, name, assign, as_tool, get_config_jsonschema, get_input_jsonschema, get_output_jsonschema, model_construct, model_copy, model_dump, model_dump_json, model_parametrized_name, model_post_init, model_rebuild, model_validate, model_validate_json, model_validate_strings, to_json, model_extra, model_fields_set, model_json_schema, predict, apredict, predict_messages, apredict_messages, generate, generate_prompt, agenerate, agenerate_prompt, call_as_llm

.. NOTE:: {{objname}} implements the standard :py:class:`Runnable Interface <langchain_core.runnables.base.Runnable>`. 🏃
@@ -595,10 +595,10 @@ tool_call = ai_msg.tool_calls[0]
# -> ToolCall(args={...}, id=..., ...)
tool_message = tool.invoke(tool_call)
# -> ToolMessage(
content="tool result foobar...",
tool_call_id=...,
name="tool_name"
)
# content="tool result foobar...",
# tool_call_id=...,
# name="tool_name"
# )
```

If you are invoking the tool this way and want to include an [artifact](/docs/concepts/#toolmessage) for the ToolMessage, you will need to have the tool return two things.
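For context, the two-part return mentioned above can be sketched as follows; this is a hypothetical example assuming a `langchain-core` version that supports `response_format="content_and_artifact"` (the tool name and payload are illustrative):

```python
# Sketch of a tool returning (content, artifact); assumes langchain-core
# support for response_format="content_and_artifact".
from langchain_core.tools import tool


@tool(response_format="content_and_artifact")
def generate_numbers(n: int) -> tuple[str, list[int]]:
    """Generate the first n integers."""
    numbers = list(range(n))
    # The first element becomes the ToolMessage content; the second is
    # attached as the ToolMessage artifact when invoked with a ToolCall.
    return f"Generated {n} numbers", numbers
```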
@@ -717,8 +717,6 @@ During run-time LangChain configures an appropriate callback manager (e.g., [Cal

The `callbacks` property is available on most objects throughout the API (Models, Tools, Agents, etc.) in two different places:

The callbacks are available on most objects throughout the API (Models, Tools, Agents, etc.) in two different places:

- **Request time callbacks**: Passed at the time of the request in addition to the input data.
  Available on all standard `Runnable` objects. These callbacks are INHERITED by all children
  of the object they are defined on. For example, `chain.invoke({"number": 25}, {"callbacks": [handler]})`.
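As a rough illustration of a request-time callback (a minimal sketch assuming `langchain-core` is installed; the handler and chain are made up):

```python
# Minimal sketch: a handler passed at request time alongside the input.
from langchain_core.callbacks import BaseCallbackHandler
from langchain_core.runnables import RunnableLambda


class PrintHandler(BaseCallbackHandler):
    def on_chain_start(self, serialized, inputs, **kwargs):
        # Fires for this runnable and is inherited by its children.
        print("chain started with:", inputs)


chain = RunnableLambda(lambda d: d["number"] * 2)
chain.invoke({"number": 25}, {"callbacks": [PrintHandler()]})  # prints, then returns 50
```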
@@ -206,7 +206,7 @@
" ) -> List[Document]:\n",
" \"\"\"Get docs, adding score information.\"\"\"\n",
" docs, scores = zip(\n",
" *vectorstore.similarity_search_with_score(query, **search_kwargs)\n",
" *self.vectorstore.similarity_search_with_score(query, **search_kwargs)\n",
" )\n",
" for doc, score in zip(docs, scores):\n",
" doc.metadata[\"score\"] = score\n",
@@ -15,43 +15,15 @@
"\n",
"Make sure you have the integration packages installed for any model providers you want to support. E.g. you should have `langchain-openai` installed to init an OpenAI model.\n",
"\n",
":::\n",
"\n",
":::info Requires ``langchain >= 0.2.8``\n",
"\n",
"This functionality was added in ``langchain-core == 0.2.8``. Please make sure your package is up to date.\n",
"\n",
":::"
]
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"id": "165b0de6-9ae3-4e3d-aa98-4fc8a97c4a06",
"metadata": {
"execution": {
"iopub.execute_input": "2024-09-10T20:22:32.858670Z",
"iopub.status.busy": "2024-09-10T20:22:32.858278Z",
"iopub.status.idle": "2024-09-10T20:22:33.009452Z",
"shell.execute_reply": "2024-09-10T20:22:33.007022Z"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"zsh:1: 0.2.8 not found\r\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"metadata": {},
"outputs": [],
"source": [
"%pip install -qU langchain>=0.2.8 langchain-openai langchain-anthropic langchain-google-vertexai"
]
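For reference, a minimal sketch of the universal constructor this notebook documents (assuming `langchain >= 0.2.8` plus a provider package such as `langchain-openai`; the model name is illustrative):

```python
# Sketch only; requires langchain >= 0.2.8 and a provider package.
from langchain.chat_models import init_chat_model

model = init_chat_model("gpt-4o-mini", model_provider="openai", temperature=0)
print(model.invoke("Hello!").content)
```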
@@ -13,7 +13,7 @@
"\n",
"This sample demonstrates the use of `Amazon Textract` in combination with LangChain as a DocumentLoader.\n",
"\n",
"`Textract` supports`PDF`, `TIF`F, `PNG` and `JPEG` format.\n",
"`Textract` supports`PDF`, `TIFF`, `PNG` and `JPEG` format.\n",
"\n",
"`Textract` supports these [document sizes, languages and characters](https://docs.aws.amazon.com/textract/latest/dg/limits-document.html)."
]
@@ -61,7 +61,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install -qU firecrawl-py langchain_community"
"%pip install -qU firecrawl-py==0.0.20 langchain_community"
]
},
{
@@ -6,7 +6,7 @@
"source": [
"# Google Speech-to-Text Audio Transcripts\n",
"\n",
"The `GoogleSpeechToTextLoader` allows to transcribe audio files with the [Google Cloud Speech-to-Text API](https://cloud.google.com/speech-to-text) and loads the transcribed text into documents.\n",
"The `SpeechToTextLoader` allows to transcribe audio files with the [Google Cloud Speech-to-Text API](https://cloud.google.com/speech-to-text) and loads the transcribed text into documents.\n",
"\n",
"To use it, you should have the `google-cloud-speech` python package installed, and a Google Cloud project with the [Speech-to-Text API enabled](https://cloud.google.com/speech-to-text/v2/docs/transcribe-client-libraries#before_you_begin).\n",
"\n",

@@ -41,7 +41,7 @@
"source": [
"## Example\n",
"\n",
"The `GoogleSpeechToTextLoader` must include the `project_id` and `file_path` arguments. Audio files can be specified as a Google Cloud Storage URI (`gs://...`) or a local file path.\n",
"The `SpeechToTextLoader` must include the `project_id` and `file_path` arguments. Audio files can be specified as a Google Cloud Storage URI (`gs://...`) or a local file path.\n",
"\n",
"Only synchronous requests are supported by the loader, which has a [limit of 60 seconds or 10MB](https://cloud.google.com/speech-to-text/v2/docs/sync-recognize#:~:text=60%20seconds%20and/or%2010%20MB) per audio file."
]

@@ -52,13 +52,13 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_google_community import GoogleSpeechToTextLoader\n",
"from langchain_google_community import SpeechToTextLoader\n",
"\n",
"project_id = \"<PROJECT_ID>\"\n",
"file_path = \"gs://cloud-samples-data/speech/audio.flac\"\n",
"# or a local file path: file_path = \"./audio.wav\"\n",
"\n",
"loader = GoogleSpeechToTextLoader(project_id=project_id, file_path=file_path)\n",
"loader = SpeechToTextLoader(project_id=project_id, file_path=file_path)\n",
"\n",
"docs = loader.load()"
]

@@ -152,7 +152,7 @@
" RecognitionConfig,\n",
" RecognitionFeatures,\n",
")\n",
"from langchain_google_community import GoogleSpeechToTextLoader\n",
"from langchain_google_community import SpeechToTextLoader\n",
"\n",
"project_id = \"<PROJECT_ID>\"\n",
"location = \"global\"\n",

@@ -171,7 +171,7 @@
" ),\n",
")\n",
"\n",
"loader = GoogleSpeechToTextLoader(\n",
"loader = SpeechToTextLoader(\n",
" project_id=project_id,\n",
" location=location,\n",
" recognizer_id=recognizer_id,\n",
@@ -10,7 +10,7 @@

Install the python SDK:

```bash
pip install firecrawl-py
pip install firecrawl-py==0.0.20
```

## Document loader
@@ -1,12 +1,12 @@
# MLflow Deployments for LLMs
# MLflow AI Gateway for LLMs

>[The MLflow Deployments for LLMs](https://www.mlflow.org/docs/latest/llms/deployments/index.html) is a powerful tool designed to streamline the usage and management of various large
>[The MLflow AI Gateway for LLMs](https://www.mlflow.org/docs/latest/llms/deployments/index.html) is a powerful tool designed to streamline the usage and management of various large
> language model (LLM) providers, such as OpenAI and Anthropic, within an organization. It offers a high-level interface
> that simplifies the interaction with these services by providing a unified endpoint to handle specific LLM related requests.

## Installation and Setup

Install `mlflow` with MLflow Deployments dependencies:
Install `mlflow` with MLflow GenAI dependencies:

```sh
pip install 'mlflow[genai]'

@@ -39,10 +39,10 @@ endpoints:
    openai_api_key: $OPENAI_API_KEY
```

Start the deployments server:
Start the gateway server:

```sh
mlflow deployments start-server --config-path /path/to/config.yaml
mlflow gateway start --config-path /path/to/config.yaml
```

## Example provided by `MLflow`
@@ -1,160 +0,0 @@
# MLflow AI Gateway

:::warning

MLflow AI Gateway has been deprecated. Please use [MLflow Deployments for LLMs](/docs/integrations/providers/mlflow/) instead.

:::

>[The MLflow AI Gateway](https://www.mlflow.org/docs/latest/index.html) service is a powerful tool designed to streamline the usage and management of various large
> language model (LLM) providers, such as OpenAI and Anthropic, within an organization. It offers a high-level interface
> that simplifies the interaction with these services by providing a unified endpoint to handle specific LLM related requests.

## Installation and Setup

Install `mlflow` with MLflow AI Gateway dependencies:

```sh
pip install 'mlflow[gateway]'
```

Set the OpenAI API key as an environment variable:

```sh
export OPENAI_API_KEY=...
```

Create a configuration file:

```yaml
routes:
  - name: completions
    route_type: llm/v1/completions
    model:
      provider: openai
      name: text-davinci-003
      config:
        openai_api_key: $OPENAI_API_KEY

  - name: embeddings
    route_type: llm/v1/embeddings
    model:
      provider: openai
      name: text-embedding-ada-002
      config:
        openai_api_key: $OPENAI_API_KEY
```

Start the Gateway server:

```sh
mlflow gateway start --config-path /path/to/config.yaml
```

## Example provided by `MLflow`

>The `mlflow.langchain` module provides an API for logging and loading `LangChain` models.
> This module exports multivariate LangChain models in the langchain flavor and univariate LangChain
> models in the pyfunc flavor.

See the [API documentation and examples](https://www.mlflow.org/docs/latest/python_api/mlflow.langchain.html?highlight=langchain#module-mlflow.langchain).

## Completions Example

```python
import mlflow
from langchain.chains import LLMChain, PromptTemplate
from langchain_community.llms import MlflowAIGateway

gateway = MlflowAIGateway(
    gateway_uri="http://127.0.0.1:5000",
    route="completions",
    params={
        "temperature": 0.0,
        "top_p": 0.1,
    },
)

llm_chain = LLMChain(
    llm=gateway,
    prompt=PromptTemplate(
        input_variables=["adjective"],
        template="Tell me a {adjective} joke",
    ),
)
result = llm_chain.run(adjective="funny")
print(result)

with mlflow.start_run():
    model_info = mlflow.langchain.log_model(chain, "model")

model = mlflow.pyfunc.load_model(model_info.model_uri)
print(model.predict([{"adjective": "funny"}]))
```

## Embeddings Example

```python
from langchain_community.embeddings import MlflowAIGatewayEmbeddings

embeddings = MlflowAIGatewayEmbeddings(
    gateway_uri="http://127.0.0.1:5000",
    route="embeddings",
)

print(embeddings.embed_query("hello"))
print(embeddings.embed_documents(["hello"]))
```

## Chat Example

```python
from langchain_community.chat_models import ChatMLflowAIGateway
from langchain_core.messages import HumanMessage, SystemMessage

chat = ChatMLflowAIGateway(
    gateway_uri="http://127.0.0.1:5000",
    route="chat",
    params={
        "temperature": 0.1
    }
)

messages = [
    SystemMessage(
        content="You are a helpful assistant that translates English to French."
    ),
    HumanMessage(
        content="Translate this sentence from English to French: I love programming."
    ),
]
print(chat(messages))
```

## Databricks MLflow AI Gateway

Databricks MLflow AI Gateway is in private preview.
Please contact a Databricks representative to enroll in the preview.

```python
from langchain.chains import LLMChain
from langchain_core.prompts import PromptTemplate
from langchain_community.llms import MlflowAIGateway

gateway = MlflowAIGateway(
    gateway_uri="databricks",
    route="completions",
)

llm_chain = LLMChain(
    llm=gateway,
    prompt=PromptTemplate(
        input_variables=["adjective"],
        template="Tell me a {adjective} joke",
    ),
)
result = llm_chain.run(adjective="funny")
print(result)
```
@@ -400,18 +400,29 @@
"def hybrid_query(search_query: str) -> Dict:\n",
" vector = embeddings.embed_query(search_query) # same embeddings as for indexing\n",
" return {\n",
" \"query\": {\n",
" \"match\": {\n",
" text_field: search_query,\n",
" },\n",
" },\n",
" \"knn\": {\n",
" \"field\": dense_vector_field,\n",
" \"query_vector\": vector,\n",
" \"k\": 5,\n",
" \"num_candidates\": 10,\n",
" },\n",
" \"rank\": {\"rrf\": {}},\n",
" \"retriever\": {\n",
" \"rrf\": {\n",
" \"retrievers\": [\n",
" {\n",
" \"standard\": {\n",
" \"query\": {\n",
" \"match\": {\n",
" text_field: search_query,\n",
" }\n",
" }\n",
" }\n",
" },\n",
" {\n",
" \"knn\": {\n",
" \"field\": dense_vector_field,\n",
" \"query_vector\": vector,\n",
" \"k\": 5,\n",
" \"num_candidates\": 10,\n",
" }\n",
" },\n",
" ]\n",
" }\n",
" }\n",
" }\n",
"\n",
"\n",
@@ -21,7 +21,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install --upgrade --quiet langchain_google_community"
"%pip install --upgrade --quiet langchain-google-community"
]
},
{
@@ -99,7 +99,7 @@
"vector_store = Chroma(\n",
" collection_name=\"example_collection\",\n",
" embedding_function=embeddings,\n",
" persist_directory=\"./chroma_langchain_db\", # Where to save data locally, remove if not neccesary\n",
" persist_directory=\"./chroma_langchain_db\", # Where to save data locally, remove if not necessary\n",
")"
]
},

@@ -179,7 +179,7 @@
"from langchain_core.documents import Document\n",
"\n",
"document_1 = Document(\n",
" page_content=\"I had chocalate chip pancakes and scrambled eggs for breakfast this morning.\",\n",
" page_content=\"I had chocolate chip pancakes and scrambled eggs for breakfast this morning.\",\n",
" metadata={\"source\": \"tweet\"},\n",
" id=1,\n",
")\n",

@@ -273,7 +273,7 @@
"outputs": [],
"source": [
"updated_document_1 = Document(\n",
" page_content=\"I had chocalate chip pancakes and fried eggs for breakfast this morning.\",\n",
" page_content=\"I had chocolate chip pancakes and fried eggs for breakfast this morning.\",\n",
" metadata={\"source\": \"tweet\"},\n",
" id=1,\n",
")\n",

@@ -287,7 +287,7 @@
"vector_store.update_document(document_id=uuids[0], document=updated_document_1)\n",
"# You can also update multiple documents at once\n",
"vector_store.update_documents(\n",
" ids=uuids[:2], documents=[updated_document_1, updated_document_1]\n",
" ids=uuids[:2], documents=[updated_document_1, updated_document_2]\n",
")"
]
},

@@ -380,7 +380,7 @@
"source": [
"## API reference\n",
"\n",
"For detailed documentation of all `AstraDBVectorStore` features and configurations head to the API reference:https://python.langchain.com/api_reference/community/vectorstores/langchain_community.vectorstores.clickhouse.Clickhouse.html"
"For detailed documentation of all `Clickhouse` features and configurations head to the API reference:https://python.langchain.com/api_reference/community/vectorstores/langchain_community.vectorstores.clickhouse.Clickhouse.html"
]
}
],
@@ -85,8 +85,8 @@ Build stateful, multi-actor applications with LLMs. Integrates smoothly with Lan

## Additional resources

### [Versions](/docs/versions/overview/)
See what changed in v0.2, learn how to migrate legacy code, and read up on our release/versioning policies, and more.
### [Versions](/docs/versions/v0_3/)
See what changed in v0.3, learn how to migrate legacy code, read up on our versioning policies, and more.

### [Security](/docs/security)
Read up on [security](/docs/security) best practices to make sure you're developing safely with LangChain.
@@ -90,7 +90,7 @@
"source": [
" </TabItem>\n",
" <TabItem value=\"conda\" label=\"Conda\">\n",
" <CodeBlock language=\"bash\">conda install langchain langchain_community langchain_chroma -c conda-forge</CodeBlock>\n",
" <CodeBlock language=\"bash\">conda install langchain langchain-community langchain-chroma -c conda-forge</CodeBlock>\n",
" </TabItem>\n",
"</Tabs>\n",
"\n",
@@ -8,7 +8,7 @@ keywords: [retrievalqa, llmchain, conversationalretrievalchain]

This code contains a list of deprecations and removals in the `langchain` and `langchain-core` packages.

New features and improvements are not listed here. See the [overview](/docs/versions/overview/) for a summary of what's new in this release.
New features and improvements are not listed here. See the [overview](/docs/versions/v0_2/overview/) for a summary of what's new in this release.

## Breaking changes
@@ -2,7 +2,7 @@
sidebar_position: 1
---

# Migrating to LangChain v0.2
# Migration
@@ -3,7 +3,7 @@ sidebar_position: 2
sidebar_label: astream_events v2
---

# Migrating to Astream Events v2
# Migrating to astream_events(..., version="v2")

We've added a `v2` of the astream_events API with the release of `0.2.x`. You can see this [PR](https://github.com/langchain-ai/langchain/pull/21638) for more details.
@@ -1,9 +1,8 @@
---
sidebar_position: 0
sidebar_label: Overview of v0.2
---

# Overview of LangChain v0.2
# Overview

## What’s new in LangChain?
271 docs/docs/versions/v0_3/index.mdx (new file)

@@ -0,0 +1,271 @@
# LangChain v0.3

*Last updated: 09.16.24*

## What's changed

* All packages have been upgraded from Pydantic 1 to Pydantic 2 internally. Use of Pydantic 2 in user code is fully supported with all packages without the need for bridges like `langchain_core.pydantic_v1` or `pydantic.v1`.
* Pydantic 1 will no longer be supported as it reached its end-of-life in June 2024.
* Python 3.8 will no longer be supported as its end-of-life is October 2024.

**These are the only breaking changes.**

## What’s new

The following features have been added during the development of 0.2.x:

- Moved more integrations from `langchain-community` to their own `langchain-x` packages. This is a non-breaking change, as the legacy implementations are left in `langchain-community` and marked as deprecated. This allows us to better manage the dependencies of, test, and version these integrations. You can see all the latest integration packages in the [API reference](https://python.langchain.com/v0.2/api_reference/reference.html#integrations).
- Simplified tool definition and usage. Read more [here](https://blog.langchain.dev/improving-core-tool-interfaces-and-docs-in-langchain/).
- Added utilities for interacting with chat models: [universal model constructor](https://python.langchain.com/v0.2/docs/how_to/chat_models_universal_init/), [rate limiter](https://python.langchain.com/v0.2/docs/how_to/chat_model_rate_limiting/), and [message utilities](https://python.langchain.com/v0.2/docs/how_to/#messages).
- Added the ability to [dispatch custom events](https://python.langchain.com/v0.2/docs/how_to/callbacks_custom_events/).
- Revamped integration docs and API reference. Read more [here](https://blog.langchain.dev/langchain-integration-docs-revamped/).
- Marked as deprecated a number of legacy chains and added migration guides for all of them. These are slated for removal in `langchain` 1.0.0. See the deprecated chains and associated [migration guides here](https://python.langchain.com/v0.2/docs/versions/migrating_chains/).

## How to update your code

If you're using `langchain` / `langchain-community` / `langchain-core` 0.0 or 0.1, we recommend that you first [upgrade to 0.2](https://python.langchain.com/v0.2/docs/versions/v0_2/).

If you're using `langgraph`, upgrade to `langgraph>=0.2.20,<0.3`. This will work with either 0.2 or 0.3 versions of all the base packages.

Here is a complete list of all packages that have been released and what we recommend upgrading your version constraints to.
Any package that now requires `langchain-core` 0.3 had a minor version bump.
Any package that is now compatible with both `langchain-core` 0.2 and 0.3 had a patch version bump.

You can use the `langchain-cli` to update deprecated imports automatically.
The CLI will handle updating deprecated imports that were introduced in LangChain 0.0.x and LangChain 0.1, as
well as updating the `langchain_core.pydantic_v1` and `langchain.pydantic_v1` imports.

### Base packages

| Package | Latest | Recommended constraint |
|--------------------------|--------|------------------------|
| langchain | 0.3.0 | >=0.3,<0.4 |
| langchain-community | 0.3.0 | >=0.3,<0.4 |
| langchain-text-splitters | 0.3.0 | >=0.3,<0.4 |
| langchain-core | 0.3.0 | >=0.3,<0.4 |
| langchain-experimental | 0.3.0 | >=0.3,<0.4 |

### Downstream packages

| Package | Latest | Recommended constraint |
|-----------|--------|------------------------|
| langgraph | 0.2.20 | >=0.2.20,<0.3 |
| langserve | 0.3.0 | >=0.3,<0.4 |

### Integration packages

| Package | Latest | Recommended constraint |
|----------------------------------------|---------|----------------------------|
| langchain-ai21 | 0.2.0 | >=0.2,<0.3 |
| langchain-aws | 0.2.0 | >=0.2,<0.3 |
| langchain-anthropic | 0.2.0 | >=0.2,<0.3 |
| langchain-astradb | 0.4.1 | >=0.4.1,<0.5 |
| langchain-azure-dynamic-sessions | 0.2.0 | >=0.2,<0.3 |
| langchain-box | 0.2.0 | >=0.2,<0.3 |
| langchain-chroma | 0.1.4 | >=0.1.4,<0.2 |
| langchain-cohere | 0.3.0 | >=0.3,<0.4 |
| langchain-elasticsearch | 0.3.0 | >=0.3,<0.4 |
| langchain-exa | 0.2.0 | >=0.2,<0.3 |
| langchain-fireworks | 0.2.0 | >=0.2,<0.3 |
| langchain-groq | 0.2.0 | >=0.2,<0.3 |
| langchain-google-community | 2.0.0 | >=2,<3 |
| langchain-google-genai | 2.0.0 | >=2,<3 |
| langchain-google-vertexai | 2.0.0 | >=2,<3 |
| langchain-huggingface | 0.1.0 | >=0.1,<0.2 |
| langchain-ibm | 0.2.0 | >=0.2,<0.3 |
| langchain-milvus | 0.1.6 | >=0.1.6,<0.2 |
| langchain-mistralai | 0.2.0 | >=0.2,<0.3 |
| langchain-mongodb | 0.2.0 | >=0.2,<0.3 |
| langchain-nomic | 0.1.3 | >=0.1.3,<0.2 |
| langchain-ollama | 0.2.0 | >=0.2,<0.3 |
| langchain-openai | 0.2.0 | >=0.2,<0.3 |
| langchain-pinecone | 0.2.0 | >=0.2,<0.3 |
| langchain-postgres | 0.0.13 | >=0.0.13,<0.1 |
| langchain-prompty | 0.1.0 | >=0.1,<0.2 |
| langchain-qdrant | 0.1.4 | >=0.1.4,<0.2 |
| langchain-redis | 0.1.0 | >=0.1,<0.2 |
| langchain-sema4 | 0.2.0 | >=0.2,<0.3 |
| langchain-together | 0.2.0 | >=0.2,<0.3 |
| langchain-unstructured | 0.1.4 | >=0.1.4,<0.2 |
| langchain-upstage | 0.3.0 | >=0.3,<0.4 |
| langchain-voyageai | 0.2.0 | >=0.2,<0.3 |
| langchain-weaviate | 0.0.3 | >=0.0.3,<0.1 |
Once you've updated to recent versions of the packages, you may need to address the following issues stemming from the internal switch from Pydantic v1 to Pydantic v2:

- If your code depends on Pydantic aside from LangChain, you will need to upgrade your pydantic version constraints to be `pydantic>=2,<3`. See [Pydantic’s migration guide](https://docs.pydantic.dev/latest/migration/) for help migrating your non-LangChain code to Pydantic v2 if you use pydantic v1.
- There are a number of side effects to LangChain components caused by the internal switch from Pydantic v1 to v2. We have listed some of the common cases below together with the recommended solutions.

## Common issues when transitioning to Pydantic 2

### 1. Do not use the `langchain_core.pydantic_v1` namespace

Replace any usage of `langchain_core.pydantic_v1` or `langchain.pydantic_v1` with
direct imports from `pydantic`.

For example,

```python
from langchain_core.pydantic_v1 import BaseModel
```

to:

```python
from pydantic import BaseModel
```

This may require you to make additional updates to your Pydantic code given that there are a number of breaking changes in Pydantic 2. See the [Pydantic Migration](https://docs.pydantic.dev/latest/migration/) for how to upgrade your code from Pydantic 1 to 2.

### 2. Passing Pydantic objects to LangChain APIs

Users using the following APIs:

* `BaseChatModel.bind_tools`
* `BaseChatModel.with_structured_output`
* `Tool.from_function`
* `StructuredTool.from_function`

should ensure that they are passing Pydantic 2 objects to these APIs rather than
Pydantic 1 objects (created via the `pydantic.v1` namespace of pydantic 2).

:::caution
While `v1` objects may be accepted by some of these APIs, users are advised to
use Pydantic 2 objects to avoid future issues.
:::
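For example, a minimal sketch of passing a Pydantic 2 class to `bind_tools` (assuming `langchain-openai` is installed; the model name and schema are illustrative):

```python
# Sketch: pass the Pydantic 2 class itself, not a pydantic.v1 model.
from pydantic import BaseModel, Field  # Pydantic 2 imports
from langchain_openai import ChatOpenAI


class GetWeather(BaseModel):
    """Get the current weather in a given location."""

    location: str = Field(description="City and state, e.g. San Francisco, CA")


llm = ChatOpenAI(model="gpt-4o-mini")
llm_with_tools = llm.bind_tools([GetWeather])
```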
### 3. Sub-classing LangChain models

Any sub-classing from existing LangChain models (e.g., `BaseTool`, `BaseChatModel`, `LLM`)
should upgrade to use Pydantic 2 features.

For example, any user code that's relying on Pydantic 1 features (e.g., `validator`) should
be updated to the Pydantic 2 equivalent (e.g., `field_validator`), and any references to
`pydantic.v1`, `langchain_core.pydantic_v1`, `langchain.pydantic_v1` should be replaced
with imports from `pydantic`.

```python
from pydantic.v1 import validator, Field  # if pydantic 2 is installed
# from pydantic import validator, Field # if pydantic 1 is installed
# from langchain_core.pydantic_v1 import validator, Field
# from langchain.pydantic_v1 import validator, Field
from langchain_core.tools import BaseTool


class CustomTool(BaseTool):  # BaseTool is v1 code
    x: int = Field(default=1)

    def _run(*args, **kwargs):
        return "hello"

    @validator('x')  # v1 code
    @classmethod
    def validate_x(cls, x: int) -> int:
        return 1
```

Should change to:

```python
from pydantic import Field, field_validator  # pydantic v2
from langchain_core.tools import BaseTool


class CustomTool(BaseTool):  # BaseTool is v1 code
    x: int = Field(default=1)

    def _run(*args, **kwargs):
        return "hello"

    @field_validator('x')  # v2 code
    @classmethod
    def validate_x(cls, x: int) -> int:
        return 1


CustomTool(
    name='custom_tool',
    description="hello",
    x=1,
)
```

### 4. model_rebuild()

When sub-classing from LangChain models, users may need to add relevant imports
to the file and rebuild the model.

You can read more about `model_rebuild` [here](https://docs.pydantic.dev/latest/concepts/models/#rebuilding-model-schema).

```python
from langchain_core.output_parsers import BaseOutputParser


class FooParser(BaseOutputParser):
    ...
```

New code:

```python
from typing import Optional as Optional

from langchain_core.output_parsers import BaseOutputParser


class FooParser(BaseOutputParser):
    ...


FooParser.model_rebuild()
```

## Migrate using langchain-cli

The `langchain-cli` can help update deprecated LangChain imports in your code automatically.

Please note that the `langchain-cli` only handles deprecated LangChain imports and cannot
help to upgrade your code from pydantic 1 to pydantic 2.

For help with the Pydantic 1 to 2 migration itself please refer to the [Pydantic Migration Guidelines](https://docs.pydantic.dev/latest/migration/).

As of 0.0.31, the `langchain-cli` relies on [gritql](https://about.grit.io/) for applying code mods.

### Installation

```bash
pip install -U langchain-cli
langchain-cli --version # <-- Make sure the version is at least 0.0.31
```

### Usage

Given that the migration script is not perfect, you should make sure you have a backup of your code first (e.g., using version control like `git`).

The `langchain-cli` will handle the `langchain_core.pydantic_v1` deprecation introduced in LangChain 0.3 as well
as older deprecations (e.g., `from langchain.chat_models import ChatOpenAI`, which should be `from langchain_openai import ChatOpenAI`).

You will need to run the migration script **twice** as it only applies one import replacement per run.

For example, say that your code is still using the old import `from langchain.chat_models import ChatOpenAI`:

After the first run, you’ll get: `from langchain_community.chat_models import ChatOpenAI`
After the second run, you’ll get: `from langchain_openai import ChatOpenAI`

```bash
# Run a first time
# Will replace from langchain.chat_models import ChatOpenAI
langchain-cli migrate --help [path to code] # Help
langchain-cli migrate [path to code] # Apply

# Run a second time to apply more import replacements
langchain-cli migrate --diff [path to code] # Preview
langchain-cli migrate [path to code] # Apply
```

### Other options

```bash
# See help menu
langchain-cli migrate --help
# Preview Changes without applying
langchain-cli migrate --diff [path to code]
# Approve changes interactively
langchain-cli migrate --interactive [path to code]
```
@@ -1,187 +0,0 @@
|
||||
---
|
||||
sidebar_label: Overview of v0.3
|
||||
---
|
||||
# Overview of LangChain v0.3
|
||||
|
||||
## What’s new in LangChain?
|
||||
|
||||
The following features have been added during the development of 0.2.x:
|
||||
|
||||
- We’ve recently revamped our integration docs and API reference. Read more [here](https://blog.langchain.dev/langchain-integration-docs-revamped/).
|
||||
- We’ve continued to migrate key integrations to their own `langchain-x` packages outside of `langchain-community`. This allows us to better manage the dependencies of, test, and version these integrations. You can see all the latest integration packages in the [API reference](https://python.langchain.com/v0.2/api_reference/reference.html#integrations).
|
||||
- We’ve simplified how to define and use tools. Read more [here](https://blog.langchain.dev/improving-core-tool-interfaces-and-docs-in-langchain/).
|
||||
- We’ve added a number of key utilities for interacting with chat models: [universal model constructor](https://python.langchain.com/v0.2/docs/how_to/chat_models_universal_init/), [rate limiter](https://python.langchain.com/v0.2/docs/how_to/chat_model_rate_limiting/), [message utilities](https://python.langchain.com/v0.2/docs/how_to/#messages),
|
||||
- We've added the ability to [dispatch custom events](https://python.langchain.com/v0.2/docs/how_to/callbacks_custom_events/).
|
||||
- We have marked as deprecated a number of legacy chains and added migration guides for all of them. These are slated for removal in langchain 1.0. See the deprecated chains and associated [migration guides here](https://python.langchain.com/v0.2/docs/versions/migrating_chains/).
|
||||
|
||||
## What's changed
|
||||
|
||||
* As of the 0.3 release, LangChain has been upgraded to use Pydantic 2 internally. Pydantic v2 will be fully supported across new packages without the need for any bridges like `langchain_core.pydantic_v1`.
|
||||
* Pydantic 1 will no longer be supported as it reached its end-of-life in June 2024.
|
||||
* Python 3.8 will no longer be supported as its end-of-life is October 2024.
|
||||
|
||||
## How to update your code
|
||||
|
||||
If you're using LangChain 0.0, or 0.1, we recommend that you first [upgrade to 0.2](https://python.langchain.com/v0.2/docs/versions/v0_2/). The langchain-cli will help you to migrate many imports automatically.
|
||||
|
||||
If you're using LangChain 0.2, update your packages to use `langchain-core>=0.3`. We've released 0.3 versions of langchain-core, langchain, langchain-community and langserve. `langgraph>=0.2.20` will work with either langchain-core 0.2 or 0.3.
|
||||
|
||||
The breaking changes in this release were:
|
||||
|
||||
1. The internal switch from Pydantic v1 to Pydantic v2.
|
||||
2. The removal of the automatic addition of the suffix `Schema` to the names of tools.
|
||||
|
||||
Once you've updated to recent versions of the packages, you may need to address the following issues stemming from the internal switch from Pydantic v1 to Pydantic v2:
|
||||
|
||||
- If your code depends on Pydantic aside from LangChain, you will need to use `pydantic>=2,<3`. See [Pydantic’s migration guide](https://docs.pydantic.dev/latest/migration/) for help migrating your non-LangChain code to Pydantic v2 if you use pydantic v1.
|
||||
- There are a number of side effects to LangChain components caused by the internal switch from Pydantic v1 to v2. We have listed some of the common cases below together with the recommended solutions.
|
||||
|
||||
If you're still using deprecated LangChain please follow the [migration guides here](https://python.langchain.com/v0.2/docs/versions/migrating_chains/).
|
||||
|
||||
## Common issues when transitioning to Pydantic 2
|
||||
|
||||
### 1. Do not use the langchain_core.pydantic_v1 namespace
|
||||
|
||||
Replace any usage of `langchain_core.pydantic_v1` or `langchain.pydantic_v1` with
|
||||
direct imports from `pydantic`.
|
||||
|
||||
For example,
|
||||
|
||||
```python
|
||||
from langchain_core.pydantic_v1 import BaseModel
|
||||
```
|
||||
|
||||
to:
|
||||
|
||||
```python
|
||||
from pydantic import BaseModel
|
||||
```
|
||||
|
||||
### 2. Passing Pydantic objects to LangChain APIs
|
||||
|
||||
Users using the following APIs:
|
||||
|
||||
* `BaseChatModel.bind_tools`
|
||||
* `BaseChatModel.with_structured_output`
|
||||
* `Tool.from_function`
|
||||
* `StructuredTool.from_function`
|
||||
|
||||
should ensure that they are passing Pydantic 2 objects to these APIs rather than
|
||||
Pydantic 1 objects (created via the `pydantic.v1` namespace of pydantic 2).
|
||||
|
||||
:::caution
|
||||
While `v1` objets may be accepted by some of these APIs, users are advised to
|
||||
use Pydantic 2 objects to avoid future issues.
|
||||
:::
|
||||
|
||||
### 3. Sub-classing LangChain models
|
||||
|
||||
Any sub-classing from existing LangChain models (e.g., `BaseTool`, `BaseChatModel`, `LLM`)
|
||||
should upgrade to use Pydantic 2 features.
|
||||
|
||||
For example, any user code that's relying on Pydantic 1 features (e.g., `validator`) should
|
||||
be updated to the Pydantic 2 equivalent (e.g., `field_validator`), and any references to
|
||||
`pydantic.v1`, `langchain_core.pydantic_v1`, `langchain.pydantic_v1` should be replaced
|
||||
with imports from `pydantic`.
|
||||
|
||||
```python
|
||||
from pydantic.v1 import validator, Field # if pydantic 2 is installed
|
||||
# from pydantic import validator, Field # if pydantic 1 is installed
|
||||
# from langchain_core.pydantic_v1 import validator, Field
|
||||
# from langchain.pydantic_v1 import validator, Field
|
||||
|
||||
class CustomTool(BaseTool): # BaseTool is v1 code
|
||||
x: int = Field(default=1)
|
||||
|
||||
def _run(*args, **kwargs):
|
||||
return "hello"
|
||||
|
||||
@validator('x') # v1 code
|
||||
@classmethod
|
||||
def validate_x(cls, x: int) -> int:
|
||||
return 1
|
||||
```
|
||||
|
||||
Should change to:
|
||||
|
||||
```python
|
||||
from pydantic import Field, field_validator # pydantic v2
|
||||
from langchain_core.pydantic_v1 import BaseTool
|
||||
|
||||
class CustomTool(BaseTool): # BaseTool is v1 code
|
||||
x: int = Field(default=1)
|
||||
|
||||
def _run(*args, **kwargs):
|
||||
return "hello"
|
||||
|
||||
@field_validator('x') # v2 code
|
||||
@classmethod
|
||||
def validate_x(cls, x: int) -> int:
|
||||
return 1
|
||||
|
||||
|
||||
CustomTool(
|
||||
name='custom_tool',
|
||||
description="hello",
|
||||
x=1,
|
||||
)
|
||||
```
|
||||
|
||||
### 4. model_rebuild()
|
||||
|
||||
When sub-classing from LangChain models, users may need to add relevant imports
|
||||
to the file and rebuild the model.
|
||||
|
||||
```python
|
||||
from langchain_core.output_parsers import BaseOutputParser
|
||||
|
||||
|
||||
class FooParser(BaseOutputParser):
|
||||
...
|
||||
```
|
||||
|
||||
New code:
|
||||
|
||||
```python
|
||||
from typing import Optional as Optional
|
||||
|
||||
from langchain_core.output_parsers import BaseOutputParser
|
||||
|
||||
class FooParser(BaseOutputParser):
|
||||
...
|
||||
|
||||
FooParser.model_rebuild()
|
||||
```
|
||||
|
||||
## `Schema` suffix removal
|
||||
|
||||
In previous versions of LangChain, the suffix `Schema` was automatically added to the names of tools if a tool name was not
|
||||
specified. This name was used to generate the schema for the tool which was sent to chat models as the JSON Schema for the tool.
|
||||
|
||||
We do not expect most users to be affected by this change.
|
||||
|
||||
For example, the tool

```python
from langchain_core.tools import tool


@tool
def add(x: int, y: int) -> int:
    """Add x and y."""
    return x + y
```

would have had an input schema named `addSchema` in previous versions of LangChain. In 0.3, the schema name is `add`, matching the tool:

```python
add.args_schema.model_json_schema()
```

```
{'description': 'Add x and y.',
 'properties': {'x': {'title': 'X', 'type': 'integer'},
  'y': {'title': 'Y', 'type': 'integer'}},
 'required': ['x', 'y'],
 'title': 'add',
 'type': 'object'}
```
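
If existing code relied on the old `Schema`-suffixed title, one option is to pass an explicitly named schema. A minimal sketch (the lowercase `addSchema` class name is deliberate here, purely to reproduce the old title):

```python
from pydantic import BaseModel
from langchain_core.tools import tool


class addSchema(BaseModel):  # named to match the old auto-generated title
    """Add x and y."""

    x: int
    y: int


@tool(args_schema=addSchema)
def add(x: int, y: int) -> int:
    """Add x and y."""
    return x + y
```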
@@ -168,52 +168,43 @@ const config = {
label: "Integrations",
},
{
type: "dropdown",
label: "API reference",
position: "left",
items: [
{
label: "Latest",
to: "https://python.langchain.com/api_reference/reference.html",
},
{
label: "Legacy",
href: "https://api.python.langchain.com/"
}
]
label: "API Reference",
to: "https://python.langchain.com/api_reference/",
},
{
type: "dropdown",
label: "More",
position: "left",
items: [
{
type: "doc",
docId: "people",
label: "People",
},
{
type: "doc",
docId: "contributing/index",
label: "Contributing",
},
{
label: "Cookbooks",
href: "https://github.com/langchain-ai/langchain/blob/master/cookbook/README.md"
},
{
type: "doc",
docId: "additional_resources/tutorials",
label: "3rd party tutorials"
docId: "people",
label: "People",
},
{
type: "doc",
docId: "additional_resources/youtube",
label: "YouTube"
type: 'html',
value: '<hr class="dropdown-separator" style="margin-top: 0.5rem; margin-bottom: 0.5rem">',
},
{
to: "/docs/additional_resources/arxiv_references",
label: "arXiv"
href: "https://docs.smith.langchain.com",
label: "LangSmith",
},
{
href: "https://langchain-ai.github.io/langgraph/",
label: "LangGraph",
},
{
href: "https://smith.langchain.com/hub",
label: "LangChain Hub",
},
{
href: "https://js.langchain.com",
label: "LangChain JS/TS",
},
]
},
@@ -237,30 +228,7 @@ const config = {
]
},
{
type: "dropdown",
label: "🦜️🔗",
position: "right",
items: [
{
href: "https://smith.langchain.com",
label: "LangSmith",
},
{
href: "https://docs.smith.langchain.com/",
label: "LangSmith Docs",
},
{
href: "https://smith.langchain.com/hub",
label: "LangChain Hub",
},
{
href: "https://js.langchain.com",
label: "JS/TS Docs",
},
]
},
{
href: "https://chat.langchain.com",
to: "https://chat.langchain.com",
label: "💬",
position: "right",
},
@@ -330,7 +298,7 @@ const config = {
// this is linked to erick@langchain.dev currently
apiKey: "6c01842d6a88772ed2236b9c85806441",

indexName: "python-langchain-0.2",
indexName: "python-langchain-latest",

contextualSearch: false,
},

@@ -72,25 +72,24 @@ module.exports = {
collapsed: false,
collapsible: false,
items: [
"versions/v0_3/overview",
"versions/overview",
{
type: 'doc',
id: 'versions/v0_3/index',
label: "v0.3",
},
{
type: "category",
label: "v0.2",
items: [{
type: 'autogenerated',
dirName: 'versions/v0_2',
}],
},
{
type: 'doc',
id: "how_to/pydantic_compatibility",
label: "Pydantic compatibility",
},
{
type: "category",
label: "Migrating to v0.2",
link: {type: 'doc', id: 'versions/v0_2/index'},
collapsible: false,
collapsed: false,
items: [{
type: 'autogenerated',
dirName: 'versions/v0_2',
className: 'hidden',
}],
},
{
type: "category",
label: "Migrating from v0.0 chains",

@@ -886,7 +886,7 @@ const FEATURE_TABLES = {
apiLink: "https://python.langchain.com/api_reference/community/document_loaders/langchain_community.document_loaders.html_bs.BSHTMLLoader.html"
},
{
name: "UnstrucutredXMLLoader",
name: "UnstructuredXMLLoader",
link: "xml",
source: "XML files",
apiLink: "https://python.langchain.com/api_reference/community/document_loaders/langchain_community.document_loaders.xml.UnstructuredXMLLoader.html"

@@ -2538,5 +2538,389 @@ const suggestedLinks = {
"alternative": [
"/v0.1/docs/use_cases/web_scraping/"
]
},
// below are new
"/docs/modules/data_connection/document_transformers/text_splitters/": {"canonical": "/docs/how_to/#text-splitters", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/"]},
"/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter/": {"canonical": "/docs/how_to/character_text_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/character_text_splitter/"]},
"/docs/modules/data_connection/document_transformers/text_splitters/code_splitter/": {"canonical": "/docs/how_to/code_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/code_splitter/"]},
"/docs/modules/data_connection/document_transformers/text_splitters/HTML_header_metadata/": {"canonical": "/docs/how_to/HTML_header_metadata_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/HTML_header_metadata/"]},
"/docs/modules/data_connection/document_transformers/text_splitters/HTML_section_aware_splitter/": {"canonical": "/docs/how_to/HTML_section_aware_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/HTML_section_aware_splitter/"]},
"/docs/modules/data_connection/document_transformers/text_splitters/markdown_header_metadata/": {"canonical": "/docs/how_to/markdown_header_metadata_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/markdown_header_metadata/"]},
"/docs/modules/data_connection/document_transformers/text_splitters/recursive_json_splitter/": {"canonical": "/docs/how_to/recursive_json_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/recursive_json_splitter/"]},
"/docs/modules/data_connection/document_transformers/text_splitters/recursive_text_splitter/": {"canonical": "/docs/how_to/recursive_text_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/recursive_text_splitter/"]},
"/docs/modules/data_connection/document_transformers/text_splitters/semantic-chunker/": {"canonical": "/docs/how_to/semantic-chunker/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/semantic-chunker/"]},
"/docs/modules/data_connection/document_transformers/text_splitters/split_by_token/": {"canonical": "/docs/how_to/split_by_token/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/split_by_token/"]},
"/docs/modules/model_io/prompts/prompt_templates/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/"]},
"/docs/modules/model_io/prompts/prompt_templates/composition/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/": {"canonical": "/docs/how_to/example_selectors/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/"]},
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/length_based/": {"canonical": "/docs/how_to/example_selectors_length_based/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/length_based/"]},
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/mmr/": {"canonical": "/docs/how_to/example_selectors_mmr/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/mmr/"]},
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/ngram_overlap/": {"canonical": "/docs/how_to/example_selectors_ngram/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"]},
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/similarity/": {"canonical": "/docs/how_to/example_selectors_similarity/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/similarity/"]},
"/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat/": {"canonical": "/docs/how_to/few_shot_examples_chat/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples_chat/"]},
"/docs/modules/model_io/prompts/prompt_templates/few_shot_examples/": {"canonical": "/docs/how_to/few_shot_examples/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples/"]},
"/docs/modules/model_io/prompts/prompt_templates/partial/": {"canonical": "/docs/how_to/prompts_partial/", "alternative": ["/v0.1/docs/modules/model_io/prompts/partial/"]},
"/docs/modules/model_io/prompts/prompt_templates/quick_start/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/quick_start/"]},
"/docs/modules/model_io/models/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/"]},
"/docs/modules/model_io/models/chat/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/chat/"]},
"/docs/modules/model_io/models/chat/chat_model_caching/": {"canonical": "/docs/how_to/chat_model_caching/", "alternative": ["/v0.1/docs/modules/model_io/chat/chat_model_caching/"]},
"/docs/modules/model_io/models/chat/custom_chat_model/": {"canonical": "/docs/how_to/custom_chat_model/", "alternative": ["/v0.1/docs/modules/model_io/chat/custom_chat_model/"]},
"/docs/modules/model_io/models/chat/function_calling/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/chat/function_calling/"]},
"/docs/modules/model_io/models/chat/logprobs/": {"canonical": "/docs/how_to/logprobs/", "alternative": ["/v0.1/docs/modules/model_io/chat/logprobs/"]},
"/docs/modules/model_io/models/chat/message_types/": {"canonical": "/docs/concepts/#messages", "alternative": ["/v0.1/docs/modules/model_io/chat/message_types/"]},
"/docs/modules/model_io/models/chat/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/chat/quick_start/"]},
"/docs/modules/model_io/models/chat/response_metadata/": {"canonical": "/docs/how_to/response_metadata/", "alternative": ["/v0.1/docs/modules/model_io/chat/response_metadata/"]},
"/docs/modules/model_io/models/chat/streaming/": {"canonical": "/docs/how_to/streaming/", "alternative": ["/v0.1/docs/modules/model_io/chat/streaming/"]},
"/docs/modules/model_io/models/chat/structured_output/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/chat/structured_output/"]},
"/docs/modules/model_io/models/chat/token_usage_tracking/": {"canonical": "/docs/how_to/chat_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/chat/token_usage_tracking/"]},
"/docs/modules/model_io/models/concepts/": {"canonical": "/docs/concepts/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/concepts/"]},
"/docs/modules/model_io/models/llms/": {"canonical": "/docs/concepts/#llms", "alternative": ["/v0.1/docs/modules/model_io/llms/"]},
"/docs/modules/model_io/models/llms/custom_llm/": {"canonical": "/docs/how_to/custom_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/custom_llm/"]},
"/docs/modules/model_io/models/llms/llm_caching/": {"canonical": "/docs/how_to/llm_caching/", "alternative": ["/v0.1/docs/modules/model_io/llms/llm_caching/"]},
"/docs/modules/model_io/models/llms/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/llms/quick_start/"]},
"/docs/modules/model_io/models/llms/streaming_llm/": {"canonical": "/docs/how_to/streaming_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/streaming_llm/"]},
"/docs/modules/model_io/models/llms/token_usage_tracking/": {"canonical": "/docs/how_to/llm_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/llms/token_usage_tracking/"]},
"/docs/modules/model_io/models/output_parsers/": {"canonical": "/docs/how_to/#output-parsers", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/"]},
"/docs/modules/model_io/models/output_parsers/custom/": {"canonical": "/docs/how_to/output_parser_custom/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/custom/"]},
"/docs/modules/model_io/models/output_parsers/quick_start/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/quick_start/"]},
"/docs/modules/model_io/models/output_parsers/types/csv/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/csv/"]},
"/docs/modules/model_io/models/output_parsers/types/datetime/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/datetime/"]},
"/docs/modules/model_io/models/output_parsers/types/enum/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/enum/"]},
"/docs/modules/model_io/models/output_parsers/types/json/": {"canonical": "/docs/how_to/output_parser_json/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/json/"]},
"/docs/modules/model_io/models/output_parsers/types/openai_functions/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/openai_functions/"]},
"/docs/modules/model_io/models/output_parsers/types/openai_tools/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/openai_tools/"]},
"/docs/modules/model_io/models/output_parsers/types/output_fixing/": {"canonical": "/docs/how_to/output_parser_fixing/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/output_fixing/"]},
"/docs/modules/model_io/models/output_parsers/types/pandas_dataframe/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pandas_dataframe/"]},
"/docs/modules/model_io/models/output_parsers/types/pydantic/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pydantic/"]},
"/docs/modules/model_io/models/output_parsers/types/retry/": {"canonical": "/docs/how_to/output_parser_retry/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/retry/"]},
"/docs/modules/model_io/models/output_parsers/types/structured/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/structured/"]},
"/docs/modules/model_io/models/output_parsers/types/xml/": {"canonical": "/docs/how_to/output_parser_xml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/xml/"]},
"/docs/modules/model_io/models/output_parsers/types/yaml/": {"canonical": "/docs/how_to/output_parser_yaml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/yaml/"]},
"/docs/modules/model_io/models/prompts/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/"]},
"/docs/modules/model_io/models/prompts/composition/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
"/docs/modules/model_io/models/prompts/example_selectors/": {"canonical": "/docs/how_to/example_selectors/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/"]},
"/docs/modules/model_io/models/prompts/example_selectors/length_based/": {"canonical": "/docs/how_to/example_selectors_length_based/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/length_based/"]},
"/docs/modules/model_io/models/prompts/example_selectors/mmr/": {"canonical": "/docs/how_to/example_selectors_mmr/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/mmr/"]},
"/docs/modules/model_io/models/prompts/example_selectors/ngram_overlap/": {"canonical": "/docs/how_to/example_selectors_ngram/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"]},
"/docs/modules/model_io/models/prompts/example_selectors/similarity/": {"canonical": "/docs/how_to/example_selectors_similarity/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/similarity/"]},
"/docs/modules/model_io/models/prompts/few_shot_examples_chat/": {"canonical": "/docs/how_to/few_shot_examples_chat/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples_chat/"]},
"/docs/modules/model_io/models/prompts/few_shot_examples/": {"canonical": "/docs/how_to/few_shot_examples/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples/"]},
"/docs/modules/model_io/models/prompts/partial/": {"canonical": "/docs/how_to/prompts_partial/", "alternative": ["/v0.1/docs/modules/model_io/prompts/partial/"]},
"/docs/modules/model_io/models/prompts/quick_start/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/quick_start/"]},
"/docs/modules/model_io/models/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/quick_start/"]},
"/docs/use_cases/more/graph/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/"]},
"/docs/use_cases/more/graph/constructing/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/constructing/"]},
"/docs/use_cases/more/graph/mapping/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/mapping/"]},
"/docs/use_cases/more/graph/prompting/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/prompting/"]},
"/docs/use_cases/more/graph/quickstart/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/quickstart/"]},
"/docs/use_cases/more/graph/semantic/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/semantic/"]},
"/docs/modules/model_io/chat/how_to/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/chat/"]},
"/docs/modules/model_io/chat/how_to/chat_model_caching/": {"canonical": "/docs/how_to/chat_model_caching/", "alternative": ["/v0.1/docs/modules/model_io/chat/chat_model_caching/"]},
"/docs/modules/model_io/chat/how_to/custom_chat_model/": {"canonical": "/docs/how_to/custom_chat_model/", "alternative": ["/v0.1/docs/modules/model_io/chat/custom_chat_model/"]},
"/docs/modules/model_io/chat/how_to/function_calling/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/chat/function_calling/"]},
"/docs/modules/model_io/chat/how_to/logprobs/": {"canonical": "/docs/how_to/logprobs/", "alternative": ["/v0.1/docs/modules/model_io/chat/logprobs/"]},
"/docs/modules/model_io/chat/how_to/message_types/": {"canonical": "/docs/concepts/#messages", "alternative": ["/v0.1/docs/modules/model_io/chat/message_types/"]},
"/docs/modules/model_io/chat/how_to/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/chat/quick_start/"]},
"/docs/modules/model_io/chat/how_to/response_metadata/": {"canonical": "/docs/how_to/response_metadata/", "alternative": ["/v0.1/docs/modules/model_io/chat/response_metadata/"]},
"/docs/modules/model_io/chat/how_to/streaming/": {"canonical": "/docs/how_to/streaming/", "alternative": ["/v0.1/docs/modules/model_io/chat/streaming/"]},
"/docs/modules/model_io/chat/how_to/structured_output/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/chat/structured_output/"]},
"/docs/modules/model_io/chat/how_to/token_usage_tracking/": {"canonical": "/docs/how_to/chat_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/chat/token_usage_tracking/"]},
"/docs/modules/model_io/llms/how_to/": {"canonical": "/docs/concepts/#llms", "alternative": ["/v0.1/docs/modules/model_io/llms/"]},
"/docs/modules/model_io/llms/how_to/custom_llm/": {"canonical": "/docs/how_to/custom_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/custom_llm/"]},
"/docs/modules/model_io/llms/how_to/llm_caching/": {"canonical": "/docs/how_to/llm_caching/", "alternative": ["/v0.1/docs/modules/model_io/llms/llm_caching/"]},
"/docs/modules/model_io/llms/how_to/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/llms/quick_start/"]},
"/docs/modules/model_io/llms/how_to/streaming_llm/": {"canonical": "/docs/how_to/streaming_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/streaming_llm/"]},
"/docs/modules/model_io/llms/how_to/token_usage_tracking/": {"canonical": "/docs/how_to/llm_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/llms/token_usage_tracking/"]},
"/docs/modules/model_io/llms/integrations/llm_caching/": {"canonical": "/docs/how_to/llm_caching/", "alternative": ["/v0.1/docs/integrations/llms/llm_caching/"]},
"/docs/modules/model_io/chat/integrations/ollama_functions/": {"canonical": "/docs/integrations/chat/ollama/", "alternative": ["/v0.1/docs/integrations/chat/ollama_functions/"]},
"/en/latest/modules/models/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/"]},
"/en/latest/modules/models/chat/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/chat/"]},
"/en/latest/modules/models/chat/chat_model_caching/": {"canonical": "/docs/how_to/chat_model_caching/", "alternative": ["/v0.1/docs/modules/model_io/chat/chat_model_caching/"]},
"/en/latest/modules/models/chat/custom_chat_model/": {"canonical": "/docs/how_to/custom_chat_model/", "alternative": ["/v0.1/docs/modules/model_io/chat/custom_chat_model/"]},
"/en/latest/modules/models/chat/function_calling/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/chat/function_calling/"]},
"/en/latest/modules/models/chat/logprobs/": {"canonical": "/docs/how_to/logprobs/", "alternative": ["/v0.1/docs/modules/model_io/chat/logprobs/"]},
"/en/latest/modules/models/chat/message_types/": {"canonical": "/docs/concepts/#messages", "alternative": ["/v0.1/docs/modules/model_io/chat/message_types/"]},
"/en/latest/modules/models/chat/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/chat/quick_start/"]},
"/en/latest/modules/models/chat/response_metadata/": {"canonical": "/docs/how_to/response_metadata/", "alternative": ["/v0.1/docs/modules/model_io/chat/response_metadata/"]},
"/en/latest/modules/models/chat/streaming/": {"canonical": "/docs/how_to/streaming/", "alternative": ["/v0.1/docs/modules/model_io/chat/streaming/"]},
"/en/latest/modules/models/chat/structured_output/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/chat/structured_output/"]},
"/en/latest/modules/models/chat/token_usage_tracking/": {"canonical": "/docs/how_to/chat_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/chat/token_usage_tracking/"]},
"/en/latest/modules/models/concepts/": {"canonical": "/docs/concepts/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/concepts/"]},
"/en/latest/modules/models/llms/": {"canonical": "/docs/concepts/#llms", "alternative": ["/v0.1/docs/modules/model_io/llms/"]},
"/en/latest/modules/models/llms/custom_llm/": {"canonical": "/docs/how_to/custom_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/custom_llm/"]},
"/en/latest/modules/models/llms/llm_caching/": {"canonical": "/docs/how_to/llm_caching/", "alternative": ["/v0.1/docs/modules/model_io/llms/llm_caching/"]},
"/en/latest/modules/models/llms/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/llms/quick_start/"]},
"/en/latest/modules/models/llms/streaming_llm/": {"canonical": "/docs/how_to/streaming_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/streaming_llm/"]},
"/en/latest/modules/models/llms/token_usage_tracking/": {"canonical": "/docs/how_to/llm_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/llms/token_usage_tracking/"]},
"/en/latest/modules/models/output_parsers/": {"canonical": "/docs/how_to/#output-parsers", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/"]},
"/en/latest/modules/models/output_parsers/custom/": {"canonical": "/docs/how_to/output_parser_custom/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/custom/"]},
"/en/latest/modules/models/output_parsers/quick_start/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/quick_start/"]},
"/en/latest/modules/models/output_parsers/types/csv/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/csv/"]},
"/en/latest/modules/models/output_parsers/types/datetime/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/datetime/"]},
"/en/latest/modules/models/output_parsers/types/enum/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/enum/"]},
"/en/latest/modules/models/output_parsers/types/json/": {"canonical": "/docs/how_to/output_parser_json/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/json/"]},
"/en/latest/modules/models/output_parsers/types/openai_functions/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/openai_functions/"]},
"/en/latest/modules/models/output_parsers/types/openai_tools/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/openai_tools/"]},
"/en/latest/modules/models/output_parsers/types/output_fixing/": {"canonical": "/docs/how_to/output_parser_fixing/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/output_fixing/"]},
"/en/latest/modules/models/output_parsers/types/pandas_dataframe/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pandas_dataframe/"]},
"/en/latest/modules/models/output_parsers/types/pydantic/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pydantic/"]},
"/en/latest/modules/models/output_parsers/types/retry/": {"canonical": "/docs/how_to/output_parser_retry/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/retry/"]},
"/en/latest/modules/models/output_parsers/types/structured/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/structured/"]},
"/en/latest/modules/models/output_parsers/types/xml/": {"canonical": "/docs/how_to/output_parser_xml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/xml/"]},
"/en/latest/modules/models/output_parsers/types/yaml/": {"canonical": "/docs/how_to/output_parser_yaml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/yaml/"]},
"/en/latest/modules/models/prompts/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/"]},
"/en/latest/modules/models/prompts/composition/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
"/en/latest/modules/models/prompts/example_selectors/": {"canonical": "/docs/how_to/example_selectors/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/"]},
"/en/latest/modules/models/prompts/example_selectors/length_based/": {"canonical": "/docs/how_to/example_selectors_length_based/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/length_based/"]},
"/en/latest/modules/models/prompts/example_selectors/mmr/": {"canonical": "/docs/how_to/example_selectors_mmr/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/mmr/"]},
"/en/latest/modules/models/prompts/example_selectors/ngram_overlap/": {"canonical": "/docs/how_to/example_selectors_ngram/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"]},
"/en/latest/modules/models/prompts/example_selectors/similarity/": {"canonical": "/docs/how_to/example_selectors_similarity/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/similarity/"]},
"/en/latest/modules/models/prompts/few_shot_examples_chat/": {"canonical": "/docs/how_to/few_shot_examples_chat/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples_chat/"]},
"/en/latest/modules/models/prompts/few_shot_examples/": {"canonical": "/docs/how_to/few_shot_examples/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples/"]},
"/en/latest/modules/models/prompts/partial/": {"canonical": "/docs/how_to/prompts_partial/", "alternative": ["/v0.1/docs/modules/model_io/prompts/partial/"]},
"/en/latest/modules/models/prompts/quick_start/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/quick_start/"]},
"/en/latest/modules/models/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/quick_start/"]},
"/docs/modules/model_io/prompts/example_selector_types/": {"canonical": "/docs/how_to/example_selectors/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/"]},
"/docs/modules/model_io/prompts/example_selector_types/length_based/": {"canonical": "/docs/how_to/example_selectors_length_based/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/length_based/"]},
"/docs/modules/model_io/prompts/example_selector_types/mmr/": {"canonical": "/docs/how_to/example_selectors_mmr/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/mmr/"]},
"/docs/modules/model_io/prompts/example_selector_types/ngram_overlap/": {"canonical": "/docs/how_to/example_selectors_ngram/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"]},
"/docs/modules/model_io/prompts/example_selector_types/similarity/": {"canonical": "/docs/how_to/example_selectors_similarity/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/similarity/"]},
"/docs/modules/agents/tools/": {"canonical": "/docs/how_to/#tools", "alternative": ["/v0.1/docs/modules/tools/"]},
"/docs/modules/agents/tools/custom_tools/": {"canonical": "/docs/how_to/custom_tools/", "alternative": ["/v0.1/docs/modules/tools/custom_tools/"]},
"/docs/modules/agents/tools/toolkits/": {"canonical": "/docs/how_to/#tools", "alternative": ["/v0.1/docs/modules/tools/toolkits/"]},
"/docs/modules/agents/tools/tools_as_openai_functions/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/tools/tools_as_openai_functions/"]},
"/docs/guides/deployments/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/deployments/"]},
"/docs/guides/deployments/template_repos/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/deployments/template_repos/"]},
"/docs/guides/evaluation/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/"]},
"/docs/guides/evaluation/comparison/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/comparison/"]},
"/docs/guides/evaluation/comparison/custom/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/comparison/custom/"]},
"/docs/guides/evaluation/comparison/pairwise_embedding_distance/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/comparison/pairwise_embedding_distance/"]},
"/docs/guides/evaluation/comparison/pairwise_string/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/comparison/pairwise_string/"]},
"/docs/guides/evaluation/examples/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/examples/"]},
"/docs/guides/evaluation/examples/comparisons/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/examples/comparisons/"]},
"/docs/guides/evaluation/string/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/"]},
"/docs/guides/evaluation/string/criteria_eval_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/criteria_eval_chain/"]},
"/docs/guides/evaluation/string/custom/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/custom/"]},
"/docs/guides/evaluation/string/embedding_distance/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/embedding_distance/"]},
"/docs/guides/evaluation/string/exact_match/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/exact_match/"]},
"/docs/guides/evaluation/string/json/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/json/"]},
"/docs/guides/evaluation/string/regex_match/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/regex_match/"]},
"/docs/guides/evaluation/string/scoring_eval_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/scoring_eval_chain/"]},
"/docs/guides/evaluation/string/string_distance/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/string_distance/"]},
"/docs/guides/evaluation/trajectory/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/trajectory/"]},
"/docs/guides/evaluation/trajectory/custom/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/trajectory/custom/"]},
"/docs/guides/evaluation/trajectory/trajectory_eval/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/trajectory/trajectory_eval/"]},
"/docs/guides/privacy/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/"]},
"/docs/guides/privacy/amazon_comprehend_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/amazon_comprehend_chain/"]},
"/docs/guides/privacy/constitutional_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/constitutional_chain/"]},
"/docs/guides/privacy/hugging_face_prompt_injection/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/hugging_face_prompt_injection/"]},
"/docs/guides/privacy/layerup_security/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/layerup_security/"]},
"/docs/guides/privacy/logical_fallacy_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/logical_fallacy_chain/"]},
"/docs/guides/privacy/moderation/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/moderation/"]},
"/docs/guides/privacy/presidio_data_anonymization/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/"]},
"/docs/guides/privacy/presidio_data_anonymization/multi_language/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/multi_language/"]},
"/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/"]},
"/docs/guides/privacy/presidio_data_anonymization/reversible/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"]},
"/docs/guides/safety/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/"]},
"/docs/guides/safety/amazon_comprehend_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/amazon_comprehend_chain/"]},
"/docs/guides/safety/constitutional_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/constitutional_chain/"]},
"/docs/guides/safety/hugging_face_prompt_injection/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/hugging_face_prompt_injection/"]},
"/docs/guides/safety/layerup_security/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/layerup_security/"]},
"/docs/guides/safety/logical_fallacy_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/logical_fallacy_chain/"]},
"/docs/guides/safety/moderation/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/moderation/"]},
"/docs/guides/safety/presidio_data_anonymization/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/"]},
"/docs/guides/safety/presidio_data_anonymization/multi_language/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/multi_language/"]},
"/docs/guides/safety/presidio_data_anonymization/qa_privacy_protection/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/"]},
"/docs/guides/safety/presidio_data_anonymization/reversible/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"]},
"/docs/integrations/llms/titan_takeoff_pro/": {"canonical": "/docs/integrations/llms/titan_takeoff/"},
"/docs/integrations/providers/optimum_intel/": {"canonical": "/docs/integrations/providers/intel/"},
"/docs/use_cases/graph/integrations/diffbot_graphtransformer/": {"canonical": "/docs/integrations/graphs/diffbot/"},
"/docs/use_cases/graph/integrations/graph_arangodb_qa/": {"canonical": "/docs/integrations/graphs/arangodb/"},
"/docs/use_cases/graph/integrations/graph_cypher_qa/": {"canonical": "/docs/integrations/graphs/neo4j_cypher/"},
"/docs/use_cases/graph/integrations/graph_falkordb_qa/": {"canonical": "/docs/integrations/graphs/falkordb/"},
"/docs/use_cases/graph/integrations/graph_gremlin_cosmosdb_qa/": {"canonical": "/docs/integrations/graphs/azure_cosmosdb_gremlin/"},
"/docs/use_cases/graph/integrations/graph_hugegraph_qa/": {"canonical": "/docs/integrations/graphs/hugegraph/"},
"/docs/use_cases/graph/integrations/graph_kuzu_qa/": {"canonical": "/docs/integrations/graphs/kuzu_db/"},
"/docs/use_cases/graph/integrations/graph_memgraph_qa/": {"canonical": "/docs/integrations/graphs/memgraph/"},
"/docs/use_cases/graph/integrations/graph_nebula_qa/": {"canonical": "/docs/integrations/graphs/nebula_graph/"},
"/docs/use_cases/graph/integrations/graph_networkx_qa/": {"canonical": "/docs/integrations/graphs/networkx/"},
"/docs/use_cases/graph/integrations/graph_ontotext_graphdb_qa/": {"canonical": "/docs/integrations/graphs/ontotext/"},
"/docs/use_cases/graph/integrations/graph_sparql_qa/": {"canonical": "/docs/integrations/graphs/rdflib_sparql/"},
"/docs/use_cases/graph/integrations/neptune_cypher_qa/": {"canonical": "/docs/integrations/graphs/amazon_neptune_open_cypher/"},
"/docs/use_cases/graph/integrations/neptune_sparql_qa/": {"canonical": "/docs/integrations/graphs/amazon_neptune_sparql/"},
"/docs/integrations/providers/facebook_chat/": {"canonical": "/docs/integrations/providers/facebook/"},
"/docs/integrations/providers/facebook_faiss/": {"canonical": "/docs/integrations/providers/facebook/"},
"/docs/integrations/memory/google_cloud_sql_mssql/": {"canonical": "/docs/integrations/memory/google_sql_mssql/"},
"/docs/integrations/memory/google_cloud_sql_mysql/": {"canonical": "/docs/integrations/memory/google_sql_mysql/"},
"/docs/integrations/memory/google_cloud_sql_pg/": {"canonical": "/docs/integrations/memory/google_sql_pg/"},
"/docs/integrations/memory/google_datastore/": {"canonical": "/docs/integrations/memory/google_firestore_datastore/"},
"/docs/integrations/llms/huggingface_textgen_inference/": {"canonical": "/docs/integrations/llms/huggingface_endpoint/"},
"/docs/integrations/llms/huggingface_hub/": {"canonical": "/docs/integrations/llms/huggingface_endpoint/"},
"/docs/integrations/llms/bigdl/": {"canonical": "/docs/integrations/llms/ipex_llm/"},
"/docs/integrations/llms/watsonxllm/": {"canonical": "/docs/integrations/llms/ibm_watsonx/"},
"/docs/integrations/llms/pai_eas_endpoint/": {"canonical": "/docs/integrations/llms/alibabacloud_pai_eas_endpoint/"},
"/docs/integrations/vectorstores/hanavector/": {"canonical": "/docs/integrations/vectorstores/sap_hanavector/"},
"/docs/use_cases/qa_structured/sql/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/"]},
"/docs/contributing/packages/": {"canonical": "/docs/versions/release_policy/", "alternative": ["/v0.1/docs/packages/"]},
"/docs/community/": {"canonical": "/docs/contributing/"},
"/docs/modules/chains/(.+)/": {"canonical": "/docs/versions/migrating_chains/", "alternative": ["/v0.1/docs/modules/chains/"]},
"/docs/modules/agents/how_to/custom_llm_agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/custom_agent/"]},
"/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/custom_agent/"]},
"/docs/modules/agents/how_to/custom_llm_chat_agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/custom_agent/"]},
"/docs/modules/agents/how_to/custom_mrkl_agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/custom_agent/"]},
"/docs/modules/agents/how_to/streaming_stdout_final_only/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/streaming/"]},
"/docs/modules/model_io/prompts/prompts_pipelining/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
"/docs/modules/model_io/output_parsers/enum/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/enum/"]},
"/docs/modules/model_io/output_parsers/pandas_dataframe/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pandas_dataframe/"]},
"/docs/modules/model_io/output_parsers/structured/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/structured/"]},
"/docs/modules/model_io/output_parsers/xml/": {"canonical": "/docs/how_to/output_parser_xml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/xml/"]},
"/docs/use_cases/question_answering/code_understanding/": {"canonical": "https://langchain-ai.github.io/langgraph/tutorials/code_assistant/langgraph_code_assistant/", "alternative": ["/v0.1/docs/use_cases/code_understanding/"]},
"/docs/use_cases/question_answering/document-context-aware-QA/": {"canonical": "/docs/how_to/#text-splitters", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/"]},
"/docs/integrations/providers/alibabacloud_opensearch/": {"canonical": "/docs/integrations/providers/alibaba_cloud/"},
"/docs/integrations/chat/pai_eas_chat_endpoint/": {"canonical": "/docs/integrations/chat/alibaba_cloud_pai_eas/"},
"/docs/integrations/providers/tencentvectordb/": {"canonical": "/docs/integrations/providers/tencent/"},
"/docs/integrations/chat/hunyuan/": {"canonical": "/docs/integrations/chat/tencent_hunyuan/"},
"/docs/integrations/document_loaders/excel/": {"canonical": "/docs/integrations/document_loaders/microsoft_excel/"},
"/docs/integrations/document_loaders/onenote/": {"canonical": "/docs/integrations/document_loaders/microsoft_onenote/"},
"/docs/integrations/providers/aws_dynamodb/": {"canonical": "/docs/integrations/platforms/aws/"},
"/docs/integrations/providers/scann/": {"canonical": "/docs/integrations/platforms/google/"},
"/docs/integrations/toolkits/google_drive/": {"canonical": "/docs/integrations/tools/google_drive/"},
"/docs/use_cases/question_answering/chat_vector_db/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
"/docs/use_cases/question_answering/in_memory_question_answering/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
"/docs/use_cases/question_answering/multi_retrieval_qa_router/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
"/docs/use_cases/question_answering/multiple_retrieval/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
"/docs/use_cases/question_answering/vector_db_qa/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
"/docs/use_cases/question_answering/vector_db_text_generation/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
"/docs/guides/langsmith/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/langsmith/"]},
"/docs/guides/langsmith/walkthrough/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/langsmith/walkthrough/"]},
"/docs/use_cases/qa_structured/integrations/sqlite/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/"]},
"/docs/use_cases/more/data_generation/": {"canonical": "/docs/tutorials/data_generation/", "alternative": ["/v0.1/docs/use_cases/data_generation/"]},
"/docs/use_cases/question_answering/how_to/chat_vector_db/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
"/docs/use_cases/question_answering/how_to/conversational_retrieval_agents/": {"canonical": "/docs/tutorials/qa_chat_history/", "alternative": ["/v0.1/docs/use_cases/question_answering/conversational_retrieval_agents/"]},
"/docs/use_cases/question_answering/question_answering/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
"/docs/use_cases/question_answering/how_to/local_retrieval_qa/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/local_retrieval_qa/"]},
"/docs/use_cases/question_answering/how_to/question_answering/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
"/docs/modules/agents/agents/examples/mrkl_chat(.html?)/": {"canonical": "/docs/how_to/#agents", "alternative": ["/v0.1/docs/modules/agents/"]},
"/docs/integrations/": {"canonical": "/docs/integrations/providers/"},
"/docs/expression_language/cookbook/routing/": {"canonical": "/docs/how_to/routing/", "alternative": ["/v0.1/docs/expression_language/how_to/routing/"]},
"/docs/guides/expression_language/": {"canonical": "/docs/how_to/#langchain-expression-language-lcel", "alternative": ["/v0.1/docs/expression_language/"]},
"/docs/integrations/providers/amazon_api_gateway/": {"canonical": "/docs/integrations/platforms/aws/"},
"/docs/integrations/providers/huggingface/": {"canonical": "/docs/integrations/platforms/huggingface/"},
"/docs/integrations/providers/azure_blob_storage/": {"canonical": "/docs/integrations/platforms/microsoft/"},
"/docs/integrations/providers/google_vertexai_matchingengine/": {"canonical": "/docs/integrations/platforms/google/"},
"/docs/integrations/providers/aws_s3/": {"canonical": "/docs/integrations/platforms/aws/"},
"/docs/integrations/providers/azure_openai/": {"canonical": "/docs/integrations/platforms/microsoft/"},
"/docs/integrations/providers/azure_cognitive_search_/": {"canonical": "/docs/integrations/platforms/microsoft/"},
"/docs/integrations/providers/bedrock/": {"canonical": "/docs/integrations/platforms/aws/"},
"/docs/integrations/providers/google_bigquery/": {"canonical": "/docs/integrations/platforms/google/"},
"/docs/integrations/providers/google_cloud_storage/": {"canonical": "/docs/integrations/platforms/google/"},
"/docs/integrations/providers/google_drive/": {"canonical": "/docs/integrations/platforms/google/"},
"/docs/integrations/providers/google_search/": {"canonical": "/docs/integrations/platforms/google/"},
"/docs/integrations/providers/microsoft_onedrive/": {"canonical": "/docs/integrations/platforms/microsoft/"},
"/docs/integrations/providers/microsoft_powerpoint/": {"canonical": "/docs/integrations/platforms/microsoft/"},
"/docs/integrations/providers/microsoft_word/": {"canonical": "/docs/integrations/platforms/microsoft/"},
"/docs/integrations/providers/sagemaker_endpoint/": {"canonical": "/docs/integrations/platforms/aws/"},
"/docs/integrations/providers/sagemaker_tracking/": {"canonical": "/docs/integrations/callbacks/sagemaker_tracking/"},
"/docs/integrations/providers/openai/": {"canonical": "/docs/integrations/platforms/openai/"},
"/docs/integrations/cassandra/": {"canonical": "/docs/integrations/providers/cassandra/"},
"/docs/integrations/providers/providers/semadb/": {"canonical": "/docs/integrations/providers/semadb/"},
"/docs/integrations/vectorstores/vectorstores/semadb/": {"canonical": "/docs/integrations/vectorstores/semadb/"},
"/docs/integrations/vectorstores/async_faiss/": {"canonical": "/docs/integrations/vectorstores/faiss_async/"},
"/docs/integrations/vectorstores/matchingengine/": {"canonical": "/docs/integrations/vectorstores/google_vertex_ai_vector_search/"},
"/docs/integrations/tools/sqlite/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/"]},
"/docs/integrations/document_loaders/pdf-amazonTextractPDFLoader/": {"canonical": "/docs/integrations/document_loaders/amazon_textract/"},
"/docs/integrations/document_loaders/Etherscan/": {"canonical": "/docs/integrations/document_loaders/etherscan/"},
"/docs/integrations/document_loaders/merge_doc_loader/": {"canonical": "/docs/integrations/document_loaders/merge_doc/"},
"/docs/integrations/document_loaders/recursive_url_loader/": {"canonical": "/docs/integrations/document_loaders/recursive_url/"},
"/docs/integrations/providers/google_document_ai/": {"canonical": "/docs/integrations/platforms/google/"},
"/docs/integrations/memory/motorhead_memory_managed/": {"canonical": "/docs/integrations/memory/motorhead_memory/"},
"/docs/integrations/memory/dynamodb_chat_message_history/": {"canonical": "/docs/integrations/memory/aws_dynamodb/"},
"/docs/integrations/memory/entity_memory_with_sqlite/": {"canonical": "/docs/integrations/memory/sqlite/"},
"/docs/modules/model_io/chat/integrations/anthropic/": {"canonical": "/docs/integrations/chat/anthropic/"},
"/docs/modules/model_io/chat/integrations/azure_chat_openai/": {"canonical": "/docs/integrations/chat/azure_chat_openai/"},
"/docs/modules/model_io/chat/integrations/google_vertex_ai_palm/": {"canonical": "/docs/integrations/chat/google_vertex_ai_palm/"},
"/docs/modules/model_io/chat/integrations/openai/": {"canonical": "/docs/integrations/chat/openai/"},
"/docs/modules/model_io/chat/integrations/promptlayer_chatopenai/": {"canonical": "/docs/integrations/chat/promptlayer_chatopenai/"},
"/docs/modules/model_io/llms/integrations/ai21/": {"canonical": "/docs/integrations/llms/ai21/"},
"/docs/modules/model_io/llms/integrations/aleph_alpha/": {"canonical": "/docs/integrations/llms/aleph_alpha/"},
"/docs/modules/model_io/llms/integrations/anyscale/": {"canonical": "/docs/integrations/llms/anyscale/"},
"/docs/modules/model_io/llms/integrations/banana/": {"canonical": "/docs/integrations/llms/banana/"},
"/docs/modules/model_io/llms/integrations/baseten/": {"canonical": "/docs/integrations/llms/baseten/"},
"/docs/modules/model_io/llms/integrations/beam/": {"canonical": "/docs/integrations/llms/beam/"},
"/docs/modules/model_io/llms/integrations/bedrock/": {"canonical": "/docs/integrations/llms/bedrock/"},
"/docs/modules/model_io/llms/integrations/cohere/": {"canonical": "/docs/integrations/llms/cohere/"},
"/docs/modules/model_io/llms/integrations/ctransformers/": {"canonical": "/docs/integrations/llms/ctransformers/"},
"/docs/modules/model_io/llms/integrations/databricks/": {"canonical": "/docs/integrations/llms/databricks/"},
"/docs/modules/model_io/llms/integrations/google_vertex_ai_palm/": {"canonical": "/docs/integrations/llms/google_vertex_ai_palm/"},
"/docs/modules/model_io/llms/integrations/huggingface_pipelines/": {"canonical": "/docs/integrations/llms/huggingface_pipelines/"},
"/docs/modules/model_io/llms/integrations/jsonformer_experimental/": {"canonical": "/docs/integrations/llms/jsonformer_experimental/"},
"/docs/modules/model_io/llms/integrations/llamacpp/": {"canonical": "/docs/integrations/llms/llamacpp/"},
"/docs/modules/model_io/llms/integrations/manifest/": {"canonical": "/docs/integrations/llms/manifest/"},
"/docs/modules/model_io/llms/integrations/modal/": {"canonical": "/docs/integrations/llms/modal/"},
"/docs/modules/model_io/llms/integrations/mosaicml/": {"canonical": "/docs/integrations/llms/mosaicml/"},
"/docs/modules/model_io/llms/integrations/nlpcloud/": {"canonical": "/docs/integrations/llms/nlpcloud/"},
"/docs/modules/model_io/llms/integrations/openai/": {"canonical": "/docs/integrations/llms/openai/"},
"/docs/modules/model_io/llms/integrations/openlm/": {"canonical": "/docs/integrations/llms/openlm/"},
"/docs/modules/model_io/llms/integrations/predictionguard/": {"canonical": "/docs/integrations/llms/predictionguard/"},
"/docs/modules/model_io/llms/integrations/promptlayer_openai/": {"canonical": "/docs/integrations/llms/promptlayer_openai/"},
"/docs/modules/model_io/llms/integrations/rellm_experimental/": {"canonical": "/docs/integrations/llms/rellm_experimental/"},
"/docs/modules/model_io/llms/integrations/replicate/": {"canonical": "/docs/integrations/llms/replicate/"},
"/docs/modules/model_io/llms/integrations/runhouse/": {"canonical": "/docs/integrations/llms/runhouse/"},
"/docs/modules/model_io/llms/integrations/sagemaker/": {"canonical": "/docs/integrations/llms/sagemaker/"},
"/docs/modules/model_io/llms/integrations/stochasticai/": {"canonical": "/docs/integrations/llms/stochasticai/"},
"/docs/modules/model_io/llms/integrations/writer/": {"canonical": "/docs/integrations/llms/writer/"},
"/en/latest/use_cases/apis.html/": {"canonical": null, "alternative": ["/v0.1/docs/use_cases/apis/"]},
"/en/latest/use_cases/extraction.html/": {"canonical": "/docs/tutorials/extraction/", "alternative": ["/v0.1/docs/use_cases/extraction/"]},
"/en/latest/use_cases/summarization.html/": {"canonical": "/docs/tutorials/summarization/", "alternative": ["/v0.1/docs/use_cases/summarization/"]},
"/en/latest/use_cases/tabular.html/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/"]},
"/en/latest/youtube.html/": {"canonical": "/docs/additional_resources/youtube/"},
"/docs/": {"canonical": "/"},
"/en/latest/": {"canonical": "/"},
"/en/latest/index.html/": {"canonical": "/"},
"/en/latest/modules/models.html/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/"]},
"/docs/integrations/retrievers/google_cloud_enterprise_search/": {"canonical": "/docs/integrations/retrievers/google_vertex_ai_search/"},
"/docs/integrations/tools/metaphor_search/": {"canonical": "/docs/integrations/tools/exa_search/"},
"/docs/expression_language/how_to/fallbacks/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/fallbacks/"]},
"/docs/expression_language/cookbook/retrieval/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/expression_language/cookbook/agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/agent_types/xml_agent/"]},
|
||||
"/docs/modules/model_io/prompts/message_prompts/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/quick_start/"]},
|
||||
"/docs/modules/model_io/prompts/pipeline/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
|
||||
"/docs/expression_language/cookbook/memory/": {"canonical": "/docs/how_to/chatbots_memory/", "alternative": ["/v0.1/docs/modules/memory/"]},
|
||||
"/docs/expression_language/cookbook/tools/": {"canonical": "/docs/tutorials/agents/", "alternative": ["/v0.1/docs/use_cases/tool_use/quickstart/"]},
|
||||
"/docs/expression_language/cookbook/sql_db/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/quickstart/"]},
|
||||
"/docs/expression_language/cookbook/moderation/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/moderation/"]},
|
||||
"/docs/expression_language/cookbook/embedding_router/": {"canonical": "/docs/how_to/routing/", "alternative": ["/v0.1/docs/expression_language/how_to/routing/"]},
|
||||
"/docs/guides/structured_output/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/chat/structured_output/"]},
|
||||
"/docs/modules/agents/how_to/structured_tools/": {"canonical": "/docs/how_to/#tools", "alternative": ["/v0.1/docs/modules/tools/"]},
|
||||
"/docs/use_cases/csv/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/csv/"]},
|
||||
"/docs/guides/debugging/": {"canonical": "/docs/how_to/debugging/", "alternative": ["/v0.1/docs/guides/development/debugging/"]},
|
||||
"/docs/guides/extending_langchain/": {"canonical": "/docs/how_to/#custom", "alternative": ["/v0.1/docs/guides/development/extending_langchain/"]},
|
||||
"/docs/guides/fallbacks/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/fallbacks/"]},
|
||||
"/docs/guides/model_laboratory/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/"]},
|
||||
"/docs/guides/pydantic_compatibility/": {"canonical": "/docs/how_to/pydantic_compatibility/", "alternative": ["/v0.1/docs/guides/development/pydantic_compatibility/"]},
|
||||
"/docs/guides/local_llms/": {"canonical": "/docs/how_to/local_llms/", "alternative": ["/v0.1/docs/guides/development/local_llms/"]},
|
||||
"/docs/modules/model_io/quick_start/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/"]},
|
||||
"/docs/expression_language/how_to/generators/": {"canonical": "/docs/how_to/functions/", "alternative": ["/v0.1/docs/expression_language/primitives/functions/"]},
|
||||
"/docs/expression_language/how_to/functions/": {"canonical": "/docs/how_to/functions/", "alternative": ["/v0.1/docs/expression_language/primitives/functions/"]},
|
||||
"/docs/expression_language/how_to/passthrough/": {"canonical": "/docs/how_to/passthrough/", "alternative": ["/v0.1/docs/expression_language/primitives/passthrough/"]},
|
||||
"/docs/expression_language/how_to/map/": {"canonical": "/docs/how_to/parallel/", "alternative": ["/v0.1/docs/expression_language/primitives/parallel/"]},
|
||||
"/docs/expression_language/how_to/binding/": {"canonical": "/docs/how_to/binding/", "alternative": ["/v0.1/docs/expression_language/primitives/binding/"]},
|
||||
"/docs/expression_language/how_to/configure/": {"canonical": "/docs/how_to/configure/", "alternative": ["/v0.1/docs/expression_language/primitives/configure/"]},
|
||||
"/docs/expression_language/cookbook/prompt_llm_parser/": {"canonical": "/docs/how_to/sequence/", "alternative": ["/v0.1/docs/expression_language/get_started/"]},
|
||||
"/docs/contributing/documentation/": {"canonical": "/docs/contributing/documentation/", "alternative": ["/v0.1/docs/contributing/documentation/technical_logistics/"]},
|
||||
"/docs/expression_language/cookbook/": {"canonical": "/docs/how_to/#langchain-expression-language-lcel", "alternative": ["/v0.1/docs/expression_language/"]},
|
||||
"/docs/integrations/text_embedding/solar/": {"canonical": "/docs/integrations/text_embedding/upstage/"},
|
||||
"/docs/integrations/chat/solar/": {"canonical": "/docs/integrations/chat/upstage/"},
|
||||
// custom ones
|
||||
|
||||
"/docs/modules/model_io/chat/llm_chain/": {
|
||||
"canonical": "/docs/tutorials/llm_chain/"
|
||||
},
|
||||
|
||||
"/docs/modules/agents/toolkits/": {
|
||||
"canonical": "/docs/integrations/tools/",
|
||||
"alternative": [
|
||||
"/v0.1/docs/integrations/toolkits/"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -26,6 +26,22 @@
    }
  ],
  "redirects": [
+    {
+      "source": "/docs/modules/agents/tools/custom_tools(/?)",
+      "destination": "/docs/how_to/custom_tools/"
+    },
+    {
+      "source": "/docs/expression_language(/?)",
+      "destination": "/docs/concepts/#langchain-expression-language-lcel"
+    },
+    {
+      "source": "/docs/expression_language/interface(/?)",
+      "destination": "/docs/concepts/#runnable-interface"
+    },
+    {
+      "source": "/docs/versions/overview(/?)",
+      "destination": "/docs/versions/v0_2/overview/"
+    },
    {
      "source": "/docs/how_to/tool_calls_multi_modal(/?)",
      "destination": "/docs/how_to/multimodal_inputs/"
@@ -57,6 +73,10 @@
    {
      "source": "/v0.2/docs/templates/:path(.*/?)*",
      "destination": "https://github.com/langchain-ai/langchain/tree/master/templates/:path*"
    },
+    {
+      "source": "/docs/integrations/providers/mlflow_ai_gateway(/?)",
+      "destination": "/docs/integrations/providers/mlflow/"
+    }
  ]
}

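The `(/?)` suffix on each `source` makes the trailing slash optional, so both spellings of a path hit the same redirect. A minimal sketch of roughly that matching behaviour using Python's `re` (illustrative only; the actual matching is done by the hosting platform's route matcher, not this code):

```python
import re

# "(/?)" is an optional group for the trailing slash.
pattern = re.compile(r"^/docs/expression_language(/?)$")

assert pattern.match("/docs/expression_language")
assert pattern.match("/docs/expression_language/")
assert not pattern.match("/docs/expression_language/interface")
```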
@@ -13,7 +13,7 @@ license = "MIT"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
-langchain-core = "^0.3.0.dev"
+langchain-core = "^0.3.0"

[tool.poetry.group.test]
optional = true

@@ -5,6 +5,7 @@ from pathlib import Path

import rich
import typer
from gritql import run
+from typer import Option


def get_gritdir_path() -> Path:
@@ -15,15 +16,28 @@ def get_gritdir_path() -> Path:

def migrate(
    ctx: typer.Context,
+    # Using diff instead of dry-run for backwards compatibility with the old CLI
+    diff: bool = Option(
+        False,
+        "--diff",
+        help="Show the changes that would be made without applying them.",
+    ),
+    interactive: bool = Option(
+        False,
+        "--interactive",
+        help="Prompt for confirmation before making each change",
+    ),
) -> None:
    """Migrate langchain to the most recent version.

    Any undocumented arguments will be passed to the Grit CLI.
    """
    rich.print(
-        "✈️ This script will help you migrate to a recent version LangChain. "
+        "✈️ This script will help you migrate to a LangChain 0.3. "
        "This migration script will attempt to replace old imports in the code "
-        "with new ones.\n\n"
+        "with new ones. "
+        "If you need to migrate to LangChain 0.2, please downgrade to version 0.0.29 "
+        "of the langchain-cli.\n\n"
        "🔄 You will need to run the migration script TWICE to migrate (e.g., "
        "to update llms import from langchain, the script will first move them to "
        "corresponding imports from the community package, and on the second "
@@ -45,9 +59,15 @@ def migrate(
    rich.print("-" * 10)
    rich.print()

+    args = list(ctx.args)
+    if interactive:
+        args.append("--interactive")
+    if diff:
+        args.append("--dry-run")
+
    final_code = run.apply_pattern(
        "langchain_all_migrations()",
-        ctx.args,
+        args,
        grit_dir=get_gritdir_path(),
    )

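Before `run.apply_pattern` is called, the new flags are translated into Grit CLI arguments; note that `--diff` is deliberately mapped onto Grit's `--dry-run`. A standalone sketch of that translation (the function name here is hypothetical, not part of the CLI):

```python
def build_grit_args(extra_args: list[str], *, diff: bool, interactive: bool) -> list[str]:
    """Mirror the flag translation in `migrate`: copy the passthrough args,
    then map --interactive and --diff onto the Grit CLI's own flags."""
    args = list(extra_args)  # copy so the caller's list is not mutated
    if interactive:
        args.append("--interactive")
    if diff:
        args.append("--dry-run")  # --diff kept for backwards compatibility
    return args


assert build_grit_args([], diff=True, interactive=False) == ["--dry-run"]
assert build_grit_args(["-v"], diff=False, interactive=True) == ["-v", "--interactive"]
```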
@@ -7,7 +7,7 @@ readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
-langchain-core = "^0.3.0.dev"
+langchain-core = "^0.3.0"
langchain-openai = ">=0.0.1"

@@ -1,6 +1,6 @@
[tool.poetry]
name = "langchain-cli"
-version = "0.0.30"
+version = "0.0.31"
description = "CLI for interacting with LangChain"
authors = ["Erick Friis <erick@langchain.dev>"]
readme = "README.md"

@@ -15,7 +15,7 @@ LangChain Community contains third-party integrations that implement the base in

For full documentation see the [API reference](https://api.python.langchain.com/en/stable/community_api_reference.html).

-
+

## 📕 Releases & Versioning

@@ -301,7 +301,7 @@ class OpenAIAssistantV2Runnable(OpenAIAssistantRunnable):
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
-            dumpd(self), input, name=config.get("run_name")
+            dumpd(self), input, name=config.get("run_name") or self.get_name()
        )

        files = _convert_file_ids_into_attachments(kwargs.get("file_ids", []))
@@ -437,7 +437,7 @@ class OpenAIAssistantV2Runnable(OpenAIAssistantRunnable):
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
-            dumpd(self), input, name=config.get("run_name")
+            dumpd(self), input, name=config.get("run_name") or self.get_name()
        )

        files = _convert_file_ids_into_attachments(kwargs.get("file_ids", []))

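The change replaces a bare `config.get("run_name")` with an `or self.get_name()` fallback, so traces no longer get an empty run name when the config does not set one. A minimal sketch of the fallback semantics (the `config` dict and `get_name` here are illustrative stand-ins):

```python
config: dict = {}  # caller supplied no "run_name"

def get_name() -> str:
    return "OpenAIAssistantV2Runnable"  # what self.get_name() would return

# Old behaviour: the name is None when the key is missing.
assert config.get("run_name") is None
# New behaviour: fall back to the runnable's own name.
assert (config.get("run_name") or get_name()) == "OpenAIAssistantV2Runnable"
```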
@@ -8,6 +8,18 @@ from langchain_core.messages import AIMessage
from langchain_core.outputs import ChatGeneration, LLMResult

MODEL_COST_PER_1K_TOKENS = {
+    # OpenAI o1-preview input
+    "o1-preview": 0.015,
+    "o1-preview-2024-09-12": 0.015,
+    # OpenAI o1-preview output
+    "o1-preview-completion": 0.06,
+    "o1-preview-2024-09-12-completion": 0.06,
+    # OpenAI o1-mini input
+    "o1-mini": 0.003,
+    "o1-mini-2024-09-12": 0.003,
+    # OpenAI o1-mini output
+    "o1-mini-completion": 0.012,
+    "o1-mini-2024-09-12-completion": 0.012,
    # GPT-4o-mini input
    "gpt-4o-mini": 0.00015,
    "gpt-4o-mini-2024-07-18": 0.00015,
@@ -153,6 +165,7 @@ def standardize_model_name(
        model_name.startswith("gpt-4")
        or model_name.startswith("gpt-3.5")
        or model_name.startswith("gpt-35")
+        or model_name.startswith("o1-")
        or ("finetuned" in model_name and "legacy" not in model_name)
    ):
        return model_name + "-completion"

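With the new `o1-` prefix in `standardize_model_name`, completion tokens for these models resolve to the `-completion` key of the table. A rough sketch of how such a per-1K-token table is used (prices copied from the diff above; the helper itself is illustrative, not the library's API):

```python
MODEL_COST_PER_1K_TOKENS = {
    "o1-preview": 0.015,            # input, USD per 1K tokens
    "o1-preview-completion": 0.06,  # output, USD per 1K tokens
}

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    """Price a call from the per-1K-token table."""
    prompt_rate = MODEL_COST_PER_1K_TOKENS[model]
    completion_rate = MODEL_COST_PER_1K_TOKENS[model + "-completion"]
    return (prompt_tokens * prompt_rate + completion_tokens * completion_rate) / 1000

# 1,000 input tokens plus 1,000 output tokens on o1-preview: $0.015 + $0.06.
assert abs(estimate_cost("o1-preview", 1000, 1000) - 0.075) < 1e-9
```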
@@ -53,13 +53,15 @@ class LLMThoughtLabeler:
    labeling logic.
    """

-    def get_initial_label(self) -> str:
+    @staticmethod
+    def get_initial_label() -> str:
        """Return the markdown label for a new LLMThought that doesn't have
        an associated tool yet.
        """
        return f"{THINKING_EMOJI} **Thinking...**"

-    def get_tool_label(self, tool: ToolRecord, is_complete: bool) -> str:
+    @staticmethod
+    def get_tool_label(tool: ToolRecord, is_complete: bool) -> str:
        """Return the label for an LLMThought that has an associated
        tool.

@@ -91,13 +93,15 @@ class LLMThoughtLabeler:
        label = f"{emoji} **{name}:** {input}"
        return label

-    def get_history_label(self) -> str:
+    @staticmethod
+    def get_history_label() -> str:
        """Return a markdown label for the special 'history' container
        that contains overflow thoughts.
        """
        return f"{HISTORY_EMOJI} **History**"

-    def get_final_agent_thought_label(self) -> str:
+    @staticmethod
+    def get_final_agent_thought_label() -> str:
        """Return the markdown label for the agent's final thought -
        the "Now I have the answer" thought, that doesn't involve
        a tool.

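Turning these labelers into staticmethods drops the unused `self` while keeping existing call sites working, because Python resolves staticmethods through instances as well as through the class. A quick illustration (the class here is a stand-in, not the Streamlit callback code):

```python
class Labeler:
    @staticmethod
    def get_initial_label() -> str:
        return "Thinking..."

# Both spellings keep working after the refactor:
assert Labeler.get_initial_label() == "Thinking..."
assert Labeler().get_initial_label() == "Thinking..."
```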
@@ -20,13 +20,37 @@ class MongodbLoader(BaseLoader):
        *,
        filter_criteria: Optional[Dict] = None,
+        field_names: Optional[Sequence[str]] = None,
+        metadata_names: Optional[Sequence[str]] = None,
+        include_db_collection_in_metadata: bool = True,
    ) -> None:
        """
        Initializes the MongoDB loader with necessary database connection
        details and configurations.

        Args:
            connection_string (str): MongoDB connection URI.
            db_name (str): Name of the database to connect to.
            collection_name (str): Name of the collection to fetch documents from.
            filter_criteria (Optional[Dict]): MongoDB filter criteria for querying
                documents.
            field_names (Optional[Sequence[str]]): List of field names to retrieve
                from documents.
            metadata_names (Optional[Sequence[str]]): Additional metadata fields to
                extract from documents.
            include_db_collection_in_metadata (bool): Flag to include database and
                collection names in metadata.

        Raises:
            ImportError: If the motor library is not installed.
            ValueError: If any necessary argument is missing.
        """
        try:
            from motor.motor_asyncio import AsyncIOMotorClient
        except ImportError as e:
            raise ImportError(
                "Cannot import from motor, please install with `pip install motor`."
            ) from e

+        if not connection_string:
+            raise ValueError("connection_string must be provided.")
@@ -39,8 +63,10 @@ class MongodbLoader(BaseLoader):
        self.client = AsyncIOMotorClient(connection_string)
        self.db_name = db_name
        self.collection_name = collection_name
-        self.field_names = field_names
+        self.field_names = field_names or []
        self.filter_criteria = filter_criteria or {}
+        self.metadata_names = metadata_names or []
+        self.include_db_collection_in_metadata = include_db_collection_in_metadata

        self.db = self.client.get_database(db_name)
        self.collection = self.db.get_collection(collection_name)
@@ -60,36 +86,24 @@ class MongodbLoader(BaseLoader):
        return asyncio.run(self.aload())

    async def aload(self) -> List[Document]:
-        """Load data into Document objects."""
+        """Asynchronously loads data into Document objects."""
        result = []
        total_docs = await self.collection.count_documents(self.filter_criteria)

-        # Construct the projection dictionary if field_names are specified
-        projection = (
-            {field: 1 for field in self.field_names} if self.field_names else None
-        )
+        projection = self._construct_projection()

        async for doc in self.collection.find(self.filter_criteria, projection):
-            metadata = {
-                "database": self.db_name,
-                "collection": self.collection_name,
-            }
+            metadata = self._extract_fields(doc, self.metadata_names, default="")
+
+            # Optionally add database and collection names to metadata
+            if self.include_db_collection_in_metadata:
+                metadata.update(
+                    {"database": self.db_name, "collection": self.collection_name}
+                )

            # Extract text content from filtered fields or use the entire document
            if self.field_names is not None:
-                fields = {}
-                for name in self.field_names:
-                    # Split the field names to handle nested fields
-                    keys = name.split(".")
-                    value = doc
-                    for key in keys:
-                        if key in value:
-                            value = value[key]
-                        else:
-                            value = ""
-                            break
-                    fields[name] = value
-
+                fields = self._extract_fields(doc, self.field_names, default="")
                texts = [str(value) for value in fields.values()]
                text = " ".join(texts)
            else:
@@ -104,3 +118,29 @@ class MongodbLoader(BaseLoader):
            )

        return result
+
+    def _construct_projection(self) -> Optional[Dict]:
+        """Constructs the projection dictionary for MongoDB query based
+        on the specified field names and metadata names."""
+        field_names = list(self.field_names) or []
+        metadata_names = list(self.metadata_names) or []
+        all_fields = field_names + metadata_names
+        return {field: 1 for field in all_fields} if all_fields else None
+
+    def _extract_fields(
+        self,
+        document: Dict,
+        fields: Sequence[str],
+        default: str = "",
+    ) -> Dict:
+        """Extracts and returns values for specified fields from a document."""
+        extracted = {}
+        for field in fields or []:
+            value = document
+            for key in field.split("."):
+                value = value.get(key, default)
+                if value == default:
+                    break
+            new_field_name = field.replace(".", "_")
+            extracted[new_field_name] = value
+        return extracted

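`_extract_fields` walks dotted paths with `dict.get`, stops at the first missing key, and flattens the path into an underscore-joined key. The same traversal in a standalone sketch (mirrors the helper above, under the assumption that every intermediate value is a dict; note a real value equal to `default` would also stop the walk early):

```python
def extract_fields(document: dict, fields: list[str], default: str = "") -> dict:
    """Pull dotted-path fields out of a nested document."""
    extracted = {}
    for field in fields:
        value = document
        for key in field.split("."):
            value = value.get(key, default)  # assumes intermediate values are dicts
            if value == default:
                break  # missing key: keep the default and stop descending
        extracted[field.replace(".", "_")] = value
    return extracted


doc = {"_id": "1", "address": {"building": "1", "room": "1"}}
assert extract_fields(doc, ["address.room", "address.zip"]) == {
    "address_room": "1",
    "address_zip": "",
}
```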
@@ -132,6 +132,7 @@ class BeautifulSoupTransformer(BaseDocumentTransformer):
        Args:
            html_content: The original HTML content string.
            tags: A list of tags to be extracted from the HTML.
+            remove_comments: If set to True, the comments will be removed.

        Returns:
            A string combining the content of the extracted tags.
@@ -184,6 +185,7 @@ def get_navigable_strings(

    Args:
        element: A BeautifulSoup element.
+        remove_comments: If set to True, the comments will be removed.

    Returns:
        A generator of strings.

@@ -87,7 +87,7 @@ class GLiNERLinkExtractor(LinkExtractor[GLiNERInput]):
    Using LinkExtractorTransformer
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-    Using the :class:`~langchain_community.graph_vectorstores.extractors.keybert_link_extractor.LinkExtractorTransformer`,
+    Using the :class:`~langchain_community.graph_vectorstores.extractors.link_extractor_transformer.LinkExtractorTransformer`,
    we can simplify the link extraction::

        from langchain_community.document_loaders import TextLoader

@@ -176,7 +176,7 @@ class HtmlLinkExtractor(LinkExtractor[HtmlInput]):
    Using LinkExtractorTransformer
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-    Using the :class:`~langchain_community.graph_vectorstores.extractors.keybert_link_extractor.LinkExtractorTransformer`,
+    Using the :class:`~langchain_community.graph_vectorstores.extractors.link_extractor_transformer.LinkExtractorTransformer`,
    we can simplify the link extraction::

        from langchain_community.document_loaders import AsyncHtmlLoader

@@ -91,7 +91,7 @@ class KeybertLinkExtractor(LinkExtractor[KeybertInput]):
    Using LinkExtractorTransformer
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-    Using the :class:`~langchain_community.graph_vectorstores.extractors.keybert_link_extractor.LinkExtractorTransformer`,
+    Using the :class:`~langchain_community.graph_vectorstores.extractors.link_extractor_transformer.LinkExtractorTransformer`,
    we can simplify the link extraction::

        from langchain_community.document_loaders import TextLoader

@@ -144,7 +144,7 @@ class TencentVectorDB(VectorStore):

    In order to use this you need to have a database instance.
    See the following documentation for details:
-    https://cloud.tencent.com/document/product/1709/94951
+    https://cloud.tencent.com/document/product/1709/104489
    """

    field_id: str = "id"

libs/community/poetry.lock (generated)
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.

[[package]]
name = "aiohappyeyeballs"
@@ -1780,7 +1780,7 @@ files = [

[[package]]
name = "langchain"
-version = "0.3.0.dev2"
+version = "0.3.0"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.9,<4.0"
@@ -1790,8 +1790,8 @@ develop = true
[package.dependencies]
aiohttp = "^3.8.3"
async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""}
-langchain-core = "^0.3.0.dev5"
-langchain-text-splitters = "^0.3.0.dev1"
+langchain-core = "^0.3.0"
+langchain-text-splitters = "^0.3.0"
langsmith = "^0.1.17"
numpy = [
    {version = ">=1,<2", markers = "python_version < \"3.12\""},
@@ -1809,7 +1809,7 @@ url = "../langchain"

[[package]]
name = "langchain-core"
-version = "0.3.0.dev5"
+version = "0.3.0"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.9,<4.0"
@@ -1853,7 +1853,7 @@ url = "../standard-tests"

[[package]]
name = "langchain-text-splitters"
-version = "0.3.0.dev1"
+version = "0.3.0"
description = "LangChain text splitting utilities"
optional = false
python-versions = ">=3.9,<4.0"
@@ -1861,7 +1861,7 @@ files = []
develop = true

[package.dependencies]
-langchain-core = "^0.3.0.dev1"
+langchain-core = "^0.3.0"

[package.source]
type = "directory"
@@ -4386,4 +4386,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<4.0"
-content-hash = "1a81994350c65c891f5f592a522975bc6688cfad016f2af5fe8ad93a76209066"
+content-hash = "ee964a118892539749a10eeb2e7e8ce5570cf84faf02ea226fa2af865dc14135"

@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain-community"
-version = "0.3.0.dev2"
+version = "0.3.0"
description = "Community contributed LangChain integrations."
authors = []
license = "MIT"
@@ -33,8 +33,8 @@ ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogy

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
-langchain-core = { version = "^0.3.0.dev5", allow-prereleases = true }
-langchain = { version = "^0.3.0.dev2", allow-prereleases = true }
+langchain-core = "^0.3.0"
+langchain = "^0.3.0"
SQLAlchemy = ">=1.4,<3"
requests = "^2"
PyYAML = ">=5.3"

@@ -121,4 +121,4 @@ def test_callback_manager_configure_context_vars(
    assert cb.completion_tokens == 1
    assert cb.total_cost > 0
    wait_for_all_tracers()
-    assert LangChainTracer._persist_run_single.call_count == 1  # type: ignore
+    assert LangChainTracer._persist_run_single.call_count == 4  # type: ignore

@@ -12,6 +12,7 @@ def raw_docs() -> List[Dict]:
    return [
        {"_id": "1", "address": {"building": "1", "room": "1"}},
        {"_id": "2", "address": {"building": "2", "room": "2"}},
+        {"_id": "3", "address": {"building": "3", "room": "2"}},
    ]


@@ -19,18 +20,23 @@ def raw_docs() -> List[Dict]:
def expected_documents() -> List[Document]:
    return [
        Document(
-            page_content="{'_id': '1', 'address': {'building': '1', 'room': '1'}}",
+            page_content="{'_id': '2', 'address': {'building': '2', 'room': '2'}}",
            metadata={"database": "sample_restaurants", "collection": "restaurants"},
        ),
        Document(
-            page_content="{'_id': '2', 'address': {'building': '2', 'room': '2'}}",
+            page_content="{'_id': '3', 'address': {'building': '3', 'room': '2'}}",
            metadata={"database": "sample_restaurants", "collection": "restaurants"},
        ),
    ]


@pytest.mark.requires("motor")
-async def test_load_mocked(expected_documents: List[Document]) -> None:
+async def test_load_mocked_with_filters(expected_documents: List[Document]) -> None:
+    filter_criteria = {"address.room": {"$eq": "2"}}
+    field_names = ["address.building", "address.room"]
+    metadata_names = ["_id"]
+    include_db_collection_in_metadata = True

    mock_async_load = AsyncMock()
    mock_async_load.return_value = expected_documents

@@ -51,7 +57,13 @@ async def test_load_mocked(expected_documents: List[Document]) -> None:
        new=mock_async_load,
    ):
        loader = MongodbLoader(
-            "mongodb://localhost:27017", "test_db", "test_collection"
+            "mongodb://localhost:27017",
+            "test_db",
+            "test_collection",
+            filter_criteria=filter_criteria,
+            field_names=field_names,
+            metadata_names=metadata_names,
+            include_db_collection_in_metadata=include_db_collection_in_metadata,
        )
        loader.collection = mock_collection
        documents = await loader.aload()

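The test swaps the loader's async machinery for an `AsyncMock` so `aload()` can run without a live MongoDB. The same standard-library pattern in isolation (names here are illustrative):

```python
import asyncio
from unittest.mock import AsyncMock


async def main() -> None:
    loader = AsyncMock()
    loader.aload.return_value = ["doc-2", "doc-3"]
    # Awaiting the mocked coroutine yields the canned return value.
    assert await loader.aload() == ["doc-2", "doc-3"]


asyncio.run(main())
```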
@@ -53,7 +53,7 @@ LangChain Core compiles LCEL sequences to an _optimized execution plan_, with au

For more check out the [LCEL docs](https://python.langchain.com/docs/expression_language/).

-
+

For more advanced use cases, also check out [LangGraph](https://github.com/langchain-ai/langgraph), which is a graph-based runner for cyclic and recursive LLM workflows.

@@ -14,7 +14,8 @@ import contextlib
import functools
import inspect
import warnings
-from typing import Any, Callable, Generator, Type, TypeVar, Union, cast
+from collections.abc import Generator
+from typing import Any, Callable, TypeVar, Union, cast

from langchain_core._api.internal import is_caller_internal

@@ -26,7 +27,7 @@ class LangChainBetaWarning(DeprecationWarning):
# PUBLIC API


-T = TypeVar("T", bound=Union[Callable[..., Any], Type])
+T = TypeVar("T", bound=Union[Callable[..., Any], type])


def beta(

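This is part of the typing cleanup that runs through the rest of these commits: `typing.Type`, `Dict`, `List`, and friends give way to PEP 585 builtin generics, and `Generator`/`Sequence` now come from `collections.abc`. Both spellings are equivalent at runtime on Python 3.9+, which matches the `python = ">=3.9,<4.0"` floor set in the pyproject diffs above. A minimal before/after sketch:

```python
from collections.abc import Generator

# Before: from typing import Dict, Generator, List, Type
# After: builtins are subscriptable directly (PEP 585), abc types replace typing aliases.
def chunk(items: list[str], size: int) -> Generator[list[str], None, None]:
    for i in range(0, len(items), size):
        yield items[i : i + size]


assert list(chunk(["a", "b", "c"], 2)) == [["a", "b"], ["c"]]
```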
@@ -14,11 +14,10 @@ import contextlib
import functools
import inspect
import warnings
+from collections.abc import Generator
from typing import (
    Any,
    Callable,
-    Generator,
-    Type,
    TypeVar,
    Union,
    cast,
@@ -41,7 +40,7 @@ class LangChainPendingDeprecationWarning(PendingDeprecationWarning):


# Last Any should be FieldInfoV1 but this leads to circular imports
-T = TypeVar("T", bound=Union[Type, Callable[..., Any], Any])
+T = TypeVar("T", bound=Union[type, Callable[..., Any], Any])


def _validate_deprecation_params(
@@ -262,7 +261,7 @@ def deprecated(
        if not _obj_type:
            _obj_type = "attribute"
        wrapped = None
-        _name = _name or cast(Union[Type, Callable], obj.fget).__qualname__
+        _name = _name or cast(Union[type, Callable], obj.fget).__qualname__
        old_doc = obj.__doc__

        class _deprecated_property(property):
@@ -304,7 +303,7 @@ def deprecated(
        )

    else:
-        _name = _name or cast(Union[Type, Callable], obj).__qualname__
+        _name = _name or cast(Union[type, Callable], obj).__qualname__
        if not _obj_type:
            # edge case: when a function is within another function
            # within a test, this will call it a "method" not a "function"
@@ -333,9 +332,26 @@ def deprecated(
        old_doc = ""

    # Modify the docstring to include a deprecation notice.
+    if (
+        _alternative
+        and _alternative.split(".")[-1].lower() == _alternative.split(".")[-1]
+    ):
+        _alternative = f":meth:`~{_alternative}`"
+    elif _alternative:
+        _alternative = f":class:`~{_alternative}`"
+
+    if (
+        _alternative_import
+        and _alternative_import.split(".")[-1].lower()
+        == _alternative_import.split(".")[-1]
+    ):
+        _alternative_import = f":meth:`~{_alternative_import}`"
+    elif _alternative_import:
+        _alternative_import = f":class:`~{_alternative_import}`"
+
    components = [
        _message,
-        f"Use ``{_alternative}`` instead." if _alternative else "",
+        f"Use {_alternative} instead." if _alternative else "",
        f"Use ``{_alternative_import}`` instead." if _alternative_import else "",
        _addendum,
    ]

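The new branches pick a Sphinx cross-reference role by case: if the last dotted segment is all lowercase it is presumed to be a method or function (`:meth:`), otherwise a class (`:class:`). A sketch of that heuristic in isolation (the helper name is made up for illustration):

```python
def sphinx_role(alternative: str) -> str:
    """Wrap a dotted path in :meth: or :class: based on its last segment's case."""
    last = alternative.split(".")[-1]
    role = ":meth:" if last.lower() == last else ":class:"
    return f"{role}`~{alternative}`"


assert sphinx_role("langchain_core.runnables.Runnable.invoke") == (
    ":meth:`~langchain_core.runnables.Runnable.invoke`"
)
assert sphinx_role("langchain_core.runnables.Runnable") == (
    ":class:`~langchain_core.runnables.Runnable`"
)
```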
@@ -25,7 +25,8 @@ The schemas for the agents themselves are defined in langchain.agents.agent.
from __future__ import annotations

import json
-from typing import Any, List, Literal, Sequence, Union
+from collections.abc import Sequence
+from typing import Any, Literal, Union

from langchain_core.load.serializable import Serializable
from langchain_core.messages import (
@@ -71,7 +72,7 @@ class AgentAction(Serializable):
        return True

    @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object.
        Default is ["langchain", "schema", "agent"]."""
        return ["langchain", "schema", "agent"]
@@ -145,7 +146,7 @@ class AgentFinish(Serializable):
        return True

    @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "schema", "agent"]

@@ -1,19 +1,13 @@
import asyncio
import threading
from collections import defaultdict
+from collections.abc import Awaitable, Mapping, Sequence
from functools import partial
from itertools import groupby
from typing import (
    Any,
-    Awaitable,
    Callable,
-    DefaultDict,
-    Dict,
-    List,
-    Mapping,
    Optional,
-    Sequence,
-    Type,
    TypeVar,
    Union,
)
@@ -30,7 +24,7 @@ from langchain_core.runnables.config import RunnableConfig, ensure_config, patch
from langchain_core.runnables.utils import ConfigurableFieldSpec, Input, Output

T = TypeVar("T")
-Values = Dict[Union[asyncio.Event, threading.Event], Any]
+Values = dict[Union[asyncio.Event, threading.Event], Any]
CONTEXT_CONFIG_PREFIX = "__context__/"
CONTEXT_CONFIG_SUFFIX_GET = "/get"
CONTEXT_CONFIG_SUFFIX_SET = "/set"
@@ -70,10 +64,10 @@ def _key_from_id(id_: str) -> str:

def _config_with_context(
    config: RunnableConfig,
-    steps: List[Runnable],
+    steps: list[Runnable],
    setter: Callable,
    getter: Callable,
-    event_cls: Union[Type[threading.Event], Type[asyncio.Event]],
+    event_cls: Union[type[threading.Event], type[asyncio.Event]],
) -> RunnableConfig:
    if any(k.startswith(CONTEXT_CONFIG_PREFIX) for k in config.get("configurable", {})):
        return config
@@ -99,10 +93,10 @@ def _config_with_context(
    }

    values: Values = {}
-    events: DefaultDict[str, Union[asyncio.Event, threading.Event]] = defaultdict(
+    events: defaultdict[str, Union[asyncio.Event, threading.Event]] = defaultdict(
        event_cls
    )
-    context_funcs: Dict[str, Callable[[], Any]] = {}
+    context_funcs: dict[str, Callable[[], Any]] = {}
    for key, group in grouped_by_key.items():
        getters = [s for s in group if s[0].id.endswith(CONTEXT_CONFIG_SUFFIX_GET)]
        setters = [s for s in group if s[0].id.endswith(CONTEXT_CONFIG_SUFFIX_SET)]
@@ -129,7 +123,7 @@ def _config_with_context(

def aconfig_with_context(
    config: RunnableConfig,
-    steps: List[Runnable],
+    steps: list[Runnable],
) -> RunnableConfig:
    """Asynchronously patch a runnable config with context getters and setters.

@@ -145,7 +139,7 @@ def aconfig_with_context(

def config_with_context(
    config: RunnableConfig,
-    steps: List[Runnable],
+    steps: list[Runnable],
) -> RunnableConfig:
    """Patch a runnable config with context getters and setters.

@@ -165,13 +159,13 @@ class ContextGet(RunnableSerializable):

    prefix: str = ""

-    key: Union[str, List[str]]
+    key: Union[str, list[str]]

    def __str__(self) -> str:
        return f"ContextGet({_print_keys(self.key)})"

    @property
-    def ids(self) -> List[str]:
+    def ids(self) -> list[str]:
        prefix = self.prefix + "/" if self.prefix else ""
        keys = self.key if isinstance(self.key, list) else [self.key]
        return [
@@ -180,7 +174,7 @@ class ContextGet(RunnableSerializable):
        ]

    @property
-    def config_specs(self) -> List[ConfigurableFieldSpec]:
+    def config_specs(self) -> list[ConfigurableFieldSpec]:
        return super().config_specs + [
            ConfigurableFieldSpec(
                id=id_,
@@ -256,7 +250,7 @@ class ContextSet(RunnableSerializable):
        return f"ContextSet({_print_keys(list(self.keys.keys()))})"

    @property
-    def ids(self) -> List[str]:
+    def ids(self) -> list[str]:
        prefix = self.prefix + "/" if self.prefix else ""
        return [
            f"{CONTEXT_CONFIG_PREFIX}{prefix}{key}{CONTEXT_CONFIG_SUFFIX_SET}"
@@ -264,7 +258,7 @@ class ContextSet(RunnableSerializable):
        ]

    @property
-    def config_specs(self) -> List[ConfigurableFieldSpec]:
+    def config_specs(self) -> list[ConfigurableFieldSpec]:
        mapper_config_specs = [
            s
            for mapper in self.keys.values()
@@ -364,7 +358,7 @@ class Context:
        return PrefixContext(prefix=scope)

    @staticmethod
-    def getter(key: Union[str, List[str]], /) -> ContextGet:
+    def getter(key: Union[str, list[str]], /) -> ContextGet:
        return ContextGet(key=key)

    @staticmethod
@@ -385,7 +379,7 @@ class PrefixContext:
    def __init__(self, prefix: str = ""):
        self.prefix = prefix

-    def getter(self, key: Union[str, List[str]], /) -> ContextGet:
+    def getter(self, key: Union[str, list[str]], /) -> ContextGet:
        return ContextGet(key=key, prefix=self.prefix)

    def setter(

@@ -23,7 +23,8 @@ Cache directly competes with Memory. See documentation for Pros and Cons.
from __future__ import annotations

from abc import ABC, abstractmethod
-from typing import Any, Dict, Optional, Sequence, Tuple
+from collections.abc import Sequence
+from typing import Any, Optional

from langchain_core.outputs import Generation
from langchain_core.runnables import run_in_executor
@@ -157,7 +158,7 @@ class InMemoryCache(BaseCache):
        Raises:
            ValueError: If maxsize is less than or equal to 0.
        """
-        self._cache: Dict[Tuple[str, str], RETURN_VAL_TYPE] = {}
+        self._cache: dict[tuple[str, str], RETURN_VAL_TYPE] = {}
        if maxsize is not None and maxsize <= 0:
            raise ValueError("maxsize must be greater than 0")
        self._maxsize = maxsize

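`InMemoryCache` keys on a `(prompt, llm_string)` tuple and, when `maxsize` is set, must evict before inserting. A minimal sketch of that shape (the FIFO eviction policy here is an assumption for illustration, not necessarily what the class itself does):

```python
from typing import Optional


class BoundedCache:
    """Toy bounded cache keyed on (prompt, llm_string) pairs."""

    def __init__(self, maxsize: Optional[int] = None) -> None:
        if maxsize is not None and maxsize <= 0:
            raise ValueError("maxsize must be greater than 0")
        self._cache: dict[tuple[str, str], str] = {}
        self._maxsize = maxsize

    def update(self, prompt: str, llm_string: str, value: str) -> None:
        if self._maxsize is not None and len(self._cache) >= self._maxsize:
            # Drop the oldest entry; dicts preserve insertion order.
            del self._cache[next(iter(self._cache))]
        self._cache[(prompt, llm_string)] = value


cache = BoundedCache(maxsize=1)
cache.update("hi", "model-a", "hello")
cache.update("bye", "model-a", "goodbye")  # evicts the first entry
assert len(cache._cache) == 1
```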
@@ -3,7 +3,8 @@
from __future__ import annotations

import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, TypeVar, Union
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union
from uuid import UUID

from tenacity import RetryCallState
@@ -118,7 +119,7 @@ class ChainManagerMixin:

    def on_chain_end(
        self,
-        outputs: Dict[str, Any],
+        outputs: dict[str, Any],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
@@ -222,13 +223,13 @@ class CallbackManagerMixin:

    def on_llm_start(
        self,
-        serialized: Dict[str, Any],
-        prompts: List[str],
+        serialized: dict[str, Any],
+        prompts: list[str],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Any:
        """Run when LLM starts running.
@@ -249,13 +250,13 @@ class CallbackManagerMixin:

    def on_chat_model_start(
        self,
-        serialized: Dict[str, Any],
-        messages: List[List[BaseMessage]],
+        serialized: dict[str, Any],
+        messages: list[list[BaseMessage]],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Any:
        """Run when a chat model starts running.
@@ -280,13 +281,13 @@ class CallbackManagerMixin:

    def on_retriever_start(
        self,
-        serialized: Dict[str, Any],
+        serialized: dict[str, Any],
        query: str,
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Any:
        """Run when the Retriever starts running.
@@ -303,13 +304,13 @@ class CallbackManagerMixin:

    def on_chain_start(
        self,
-        serialized: Dict[str, Any],
-        inputs: Dict[str, Any],
+        serialized: dict[str, Any],
+        inputs: dict[str, Any],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Any:
        """Run when a chain starts running.
@@ -326,14 +327,14 @@ class CallbackManagerMixin:

    def on_tool_start(
        self,
-        serialized: Dict[str, Any],
+        serialized: dict[str, Any],
        input_str: str,
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-        inputs: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
+        inputs: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Any:
        """Run when the tool starts running.
@@ -393,8 +394,8 @@ class RunManagerMixin:
        data: Any,
        *,
        run_id: UUID,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Any:
        """Override to define a handler for a custom event.
@@ -470,13 +471,13 @@ class AsyncCallbackHandler(BaseCallbackHandler):

    async def on_llm_start(
        self,
-        serialized: Dict[str, Any],
-        prompts: List[str],
+        serialized: dict[str, Any],
+        prompts: list[str],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> None:
        """Run when LLM starts running.
@@ -497,13 +498,13 @@ class AsyncCallbackHandler(BaseCallbackHandler):

    async def on_chat_model_start(
        self,
-        serialized: Dict[str, Any],
-        messages: List[List[BaseMessage]],
+        serialized: dict[str, Any],
+        messages: list[list[BaseMessage]],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Any:
        """Run when a chat model starts running.
@@ -533,7 +534,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]] = None,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run on new LLM token. Only available when streaming is enabled.
@@ -554,7 +555,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run when LLM ends running.
@@ -573,7 +574,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run when LLM errors.
@@ -590,13 +591,13 @@ class AsyncCallbackHandler(BaseCallbackHandler):

    async def on_chain_start(
        self,
-        serialized: Dict[str, Any],
-        inputs: Dict[str, Any],
+        serialized: dict[str, Any],
+        inputs: dict[str, Any],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> None:
        """Run when a chain starts running.
@@ -613,11 +614,11 @@ class AsyncCallbackHandler(BaseCallbackHandler):

    async def on_chain_end(
        self,
-        outputs: Dict[str, Any],
+        outputs: dict[str, Any],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run when a chain ends running.
@@ -636,7 +637,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run when chain errors.
@@ -651,14 +652,14 @@ class AsyncCallbackHandler(BaseCallbackHandler):

    async def on_tool_start(
        self,
-        serialized: Dict[str, Any],
+        serialized: dict[str, Any],
        input_str: str,
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-        inputs: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
+        inputs: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> None:
        """Run when the tool starts running.
@@ -680,7 +681,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run when the tool ends running.
@@ -699,7 +700,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run when tool errors.
@@ -718,7 +719,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run on an arbitrary text.
@@ -754,7 +755,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run on agent action.
@@ -773,7 +774,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run on the agent end.
@@ -788,13 +789,13 @@ class AsyncCallbackHandler(BaseCallbackHandler):

    async def on_retriever_start(
        self,
-        serialized: Dict[str, Any],
+        serialized: dict[str, Any],
        query: str,
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> None:
        """Run on the retriever start.
@@ -815,7 +816,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run on the retriever end.
@@ -833,7 +834,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> None:
        """Run on retriever error.
@@ -852,8 +853,8 @@ class AsyncCallbackHandler(BaseCallbackHandler):
        data: Any,
        *,
        run_id: UUID,
-        tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> None:
        """Override to define a handler for a custom event.
@@ -880,14 +881,14 @@ class BaseCallbackManager(CallbackManagerMixin):

    def __init__(
        self,
-        handlers: List[BaseCallbackHandler],
-        inheritable_handlers: Optional[List[BaseCallbackHandler]] = None,
+        handlers: list[BaseCallbackHandler],
+        inheritable_handlers: Optional[list[BaseCallbackHandler]] = None,
        parent_run_id: Optional[UUID] = None,
        *,
-        tags: Optional[List[str]] = None,
-        inheritable_tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-        inheritable_metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        inheritable_tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
+        inheritable_metadata: Optional[dict[str, Any]] = None,
    ) -> None:
        """Initialize callback manager.

@@ -901,8 +902,8 @@ class BaseCallbackManager(CallbackManagerMixin):
                Default is None.
            metadata (Optional[Dict[str, Any]]): The metadata. Default is None.
        """
-        self.handlers: List[BaseCallbackHandler] = handlers
-        self.inheritable_handlers: List[BaseCallbackHandler] = (
+        self.handlers: list[BaseCallbackHandler] = handlers
+        self.inheritable_handlers: list[BaseCallbackHandler] = (
            inheritable_handlers or []
        )
        self.parent_run_id: Optional[UUID] = parent_run_id
@@ -1002,7 +1003,7 @@ class BaseCallbackManager(CallbackManagerMixin):
            self.inheritable_handlers.remove(handler)

    def set_handlers(
-        self, handlers: List[BaseCallbackHandler], inherit: bool = True
+        self, handlers: list[BaseCallbackHandler], inherit: bool = True
    ) -> None:
        """Set handlers as the only handlers on the callback manager.

@@ -1024,7 +1025,7 @@ class BaseCallbackManager(CallbackManagerMixin):
        """
        self.set_handlers([handler], inherit=inherit)

-    def add_tags(self, tags: List[str], inherit: bool = True) -> None:
+    def add_tags(self, tags: list[str], inherit: bool = True) -> None:
        """Add tags to the callback manager.

        Args:
@@ -1038,7 +1039,7 @@ class BaseCallbackManager(CallbackManagerMixin):
        if inherit:
            self.inheritable_tags.extend(tags)

-    def remove_tags(self, tags: List[str]) -> None:
+    def remove_tags(self, tags: list[str]) -> None:
        """Remove tags from the callback manager.

        Args:
@@ -1048,7 +1049,7 @@ class BaseCallbackManager(CallbackManagerMixin):
            self.tags.remove(tag)
            self.inheritable_tags.remove(tag)

-    def add_metadata(self, metadata: Dict[str, Any], inherit: bool = True) -> None:
+    def add_metadata(self, metadata: dict[str, Any], inherit: bool = True) -> None:
        """Add metadata to the callback manager.

        Args:
@@ -1059,7 +1060,7 @@ class BaseCallbackManager(CallbackManagerMixin):
        if inherit:
            self.inheritable_metadata.update(metadata)

-    def remove_metadata(self, keys: List[str]) -> None:
+    def remove_metadata(self, keys: list[str]) -> None:
        """Remove metadata from the callback manager.

        Args:
@@ -1070,4 +1071,4 @@ class BaseCallbackManager(CallbackManagerMixin):
            self.inheritable_metadata.pop(key)


-Callbacks = Optional[Union[List[BaseCallbackHandler], BaseCallbackManager]]
+Callbacks = Optional[Union[list[BaseCallbackHandler], BaseCallbackManager]]

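All of these hooks take the event payload positionally and everything else keyword-only, so a handler can override just the events it cares about. A minimal custom handler sketch against this interface (the import path is the real one; the handler itself is illustrative):

```python
from typing import Any, Optional
from uuid import UUID

from langchain_core.callbacks import BaseCallbackHandler


class ChainLogger(BaseCallbackHandler):
    """Log chain starts; every other hook keeps its no-op default."""

    def on_chain_start(
        self,
        serialized: dict[str, Any],
        inputs: dict[str, Any],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
        tags: Optional[list[str]] = None,
        metadata: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Any:
        print(f"chain {run_id} started with input keys: {sorted(inputs)}")
```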
@@ -2,7 +2,7 @@

from __future__ import annotations

-from typing import Any, Dict, Optional, TextIO, cast
+from typing import Any, Optional, TextIO, cast

from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.callbacks import BaseCallbackHandler
@@ -35,7 +35,7 @@ class FileCallbackHandler(BaseCallbackHandler):
        self.file.close()

    def on_chain_start(
-        self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
+        self, serialized: dict[str, Any], inputs: dict[str, Any], **kwargs: Any
    ) -> None:
        """Print out that we are entering a chain.

@@ -51,7 +51,7 @@ class FileCallbackHandler(BaseCallbackHandler):
            file=self.file,
        )

-    def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
+    def on_chain_end(self, outputs: dict[str, Any], **kwargs: Any) -> None:
        """Print out that we finished a chain.

        Args:

@@ -5,21 +5,15 @@ import functools
 import logging
 import uuid
 from abc import ABC, abstractmethod
+from collections.abc import AsyncGenerator, Coroutine, Generator, Sequence
 from concurrent.futures import ThreadPoolExecutor
 from contextlib import asynccontextmanager, contextmanager
 from contextvars import copy_context
 from typing import (
     TYPE_CHECKING,
     Any,
-    AsyncGenerator,
     Callable,
-    Coroutine,
-    Dict,
-    Generator,
-    List,
     Optional,
-    Sequence,
-    Type,
     TypeVar,
     Union,
     cast,
@@ -64,12 +58,12 @@ def trace_as_chain_group(
     group_name: str,
     callback_manager: Optional[CallbackManager] = None,
     *,
-    inputs: Optional[Dict[str, Any]] = None,
+    inputs: Optional[dict[str, Any]] = None,
     project_name: Optional[str] = None,
     example_id: Optional[Union[str, UUID]] = None,
     run_id: Optional[UUID] = None,
-    tags: Optional[List[str]] = None,
-    metadata: Optional[Dict[str, Any]] = None,
+    tags: Optional[list[str]] = None,
+    metadata: Optional[dict[str, Any]] = None,
 ) -> Generator[CallbackManagerForChainGroup, None, None]:
     """Get a callback manager for a chain group in a context manager.

     Useful for grouping different calls together as a single run even if
@@ -144,12 +138,12 @@ async def atrace_as_chain_group(
     group_name: str,
     callback_manager: Optional[AsyncCallbackManager] = None,
     *,
-    inputs: Optional[Dict[str, Any]] = None,
+    inputs: Optional[dict[str, Any]] = None,
     project_name: Optional[str] = None,
     example_id: Optional[Union[str, UUID]] = None,
     run_id: Optional[UUID] = None,
-    tags: Optional[List[str]] = None,
-    metadata: Optional[Dict[str, Any]] = None,
+    tags: Optional[list[str]] = None,
+    metadata: Optional[dict[str, Any]] = None,
 ) -> AsyncGenerator[AsyncCallbackManagerForChainGroup, None]:
     """Get an async callback manager for a chain group in a context manager.

     Useful for grouping different async calls together as a single run even if
@@ -240,7 +234,7 @@ def shielded(func: Func) -> Func:


 def handle_event(
-    handlers: List[BaseCallbackHandler],
+    handlers: list[BaseCallbackHandler],
     event_name: str,
     ignore_condition_name: Optional[str],
     *args: Any,
@@ -258,10 +252,10 @@ def handle_event(
         *args: The arguments to pass to the event handler.
         **kwargs: The keyword arguments to pass to the event handler
     """
-    coros: List[Coroutine[Any, Any, Any]] = []
+    coros: list[Coroutine[Any, Any, Any]] = []

     try:
-        message_strings: Optional[List[str]] = None
+        message_strings: Optional[list[str]] = None
         for handler in handlers:
             try:
                 if ignore_condition_name is None or not getattr(
@@ -318,7 +312,7 @@ def handle_event(
     _run_coros(coros)


-def _run_coros(coros: List[Coroutine[Any, Any, Any]]) -> None:
+def _run_coros(coros: list[Coroutine[Any, Any, Any]]) -> None:
     if hasattr(asyncio, "Runner"):
         # Python 3.11+
         # Run the coroutines in a new event loop, taking care to
@@ -399,7 +393,7 @@ async def _ahandle_event_for_handler(


 async def ahandle_event(
-    handlers: List[BaseCallbackHandler],
+    handlers: list[BaseCallbackHandler],
     event_name: str,
     ignore_condition_name: Optional[str],
     *args: Any,
@@ -446,13 +440,13 @@ class BaseRunManager(RunManagerMixin):
         self,
         *,
         run_id: UUID,
-        handlers: List[BaseCallbackHandler],
-        inheritable_handlers: List[BaseCallbackHandler],
+        handlers: list[BaseCallbackHandler],
+        inheritable_handlers: list[BaseCallbackHandler],
         parent_run_id: Optional[UUID] = None,
-        tags: Optional[List[str]] = None,
-        inheritable_tags: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-        inheritable_metadata: Optional[Dict[str, Any]] = None,
+        tags: Optional[list[str]] = None,
+        inheritable_tags: Optional[list[str]] = None,
+        metadata: Optional[dict[str, Any]] = None,
+        inheritable_metadata: Optional[dict[str, Any]] = None,
     ) -> None:
         """Initialize the run manager.

@@ -481,7 +475,7 @@ class BaseRunManager(RunManagerMixin):
         self.inheritable_metadata = inheritable_metadata or {}

     @classmethod
-    def get_noop_manager(cls: Type[BRM]) -> BRM:
+    def get_noop_manager(cls: type[BRM]) -> BRM:
         """Return a manager that doesn't perform any operations.

         Returns:
@@ -824,7 +818,7 @@ class AsyncCallbackManagerForLLMRun(AsyncRunManager, LLMManagerMixin):
 class CallbackManagerForChainRun(ParentRunManager, ChainManagerMixin):
     """Callback manager for chain run."""

-    def on_chain_end(self, outputs: Union[Dict[str, Any], Any], **kwargs: Any) -> None:
+    def on_chain_end(self, outputs: Union[dict[str, Any], Any], **kwargs: Any) -> None:
         """Run when chain ends running.

         Args:
@@ -929,7 +923,7 @@ class AsyncCallbackManagerForChainRun(AsyncParentRunManager, ChainManagerMixin):

     @shielded
     async def on_chain_end(
-        self, outputs: Union[Dict[str, Any], Any], **kwargs: Any
+        self, outputs: Union[dict[str, Any], Any], **kwargs: Any
     ) -> None:
         """Run when a chain ends running.

@@ -1248,11 +1242,11 @@ class CallbackManager(BaseCallbackManager):

     def on_llm_start(
         self,
-        serialized: Dict[str, Any],
-        prompts: List[str],
+        serialized: dict[str, Any],
+        prompts: list[str],
         run_id: Optional[UUID] = None,
         **kwargs: Any,
-    ) -> List[CallbackManagerForLLMRun]:
+    ) -> list[CallbackManagerForLLMRun]:
         """Run when LLM starts running.

         Args:
@@ -1299,11 +1293,11 @@ class CallbackManager(BaseCallbackManager):

     def on_chat_model_start(
         self,
-        serialized: Dict[str, Any],
-        messages: List[List[BaseMessage]],
+        serialized: dict[str, Any],
+        messages: list[list[BaseMessage]],
         run_id: Optional[UUID] = None,
         **kwargs: Any,
-    ) -> List[CallbackManagerForLLMRun]:
+    ) -> list[CallbackManagerForLLMRun]:
         """Run when LLM starts running.

         Args:
@@ -1354,8 +1348,8 @@ class CallbackManager(BaseCallbackManager):

     def on_chain_start(
         self,
-        serialized: Optional[Dict[str, Any]],
-        inputs: Union[Dict[str, Any], Any],
+        serialized: Optional[dict[str, Any]],
+        inputs: Union[dict[str, Any], Any],
         run_id: Optional[UUID] = None,
         **kwargs: Any,
     ) -> CallbackManagerForChainRun:
@@ -1398,11 +1392,11 @@ class CallbackManager(BaseCallbackManager):

     def on_tool_start(
         self,
-        serialized: Optional[Dict[str, Any]],
+        serialized: Optional[dict[str, Any]],
         input_str: str,
         run_id: Optional[UUID] = None,
         parent_run_id: Optional[UUID] = None,
-        inputs: Optional[Dict[str, Any]] = None,
+        inputs: Optional[dict[str, Any]] = None,
         **kwargs: Any,
     ) -> CallbackManagerForToolRun:
         """Run when tool starts running.
@@ -1453,7 +1447,7 @@ class CallbackManager(BaseCallbackManager):

     def on_retriever_start(
         self,
-        serialized: Optional[Dict[str, Any]],
+        serialized: Optional[dict[str, Any]],
         query: str,
         run_id: Optional[UUID] = None,
         parent_run_id: Optional[UUID] = None,
@@ -1541,10 +1535,10 @@ class CallbackManager(BaseCallbackManager):
         inheritable_callbacks: Callbacks = None,
         local_callbacks: Callbacks = None,
         verbose: bool = False,
-        inheritable_tags: Optional[List[str]] = None,
-        local_tags: Optional[List[str]] = None,
-        inheritable_metadata: Optional[Dict[str, Any]] = None,
-        local_metadata: Optional[Dict[str, Any]] = None,
+        inheritable_tags: Optional[list[str]] = None,
+        local_tags: Optional[list[str]] = None,
+        inheritable_metadata: Optional[dict[str, Any]] = None,
+        local_metadata: Optional[dict[str, Any]] = None,
     ) -> CallbackManager:
         """Configure the callback manager.

@@ -1583,8 +1577,8 @@ class CallbackManagerForChainGroup(CallbackManager):

     def __init__(
         self,
-        handlers: List[BaseCallbackHandler],
-        inheritable_handlers: Optional[List[BaseCallbackHandler]] = None,
+        handlers: list[BaseCallbackHandler],
+        inheritable_handlers: Optional[list[BaseCallbackHandler]] = None,
         parent_run_id: Optional[UUID] = None,
         *,
         parent_run_manager: CallbackManagerForChainRun,
@@ -1681,7 +1675,7 @@ class CallbackManagerForChainGroup(CallbackManager):
             manager.add_handler(handler, inherit=True)
         return manager

-    def on_chain_end(self, outputs: Union[Dict[str, Any], Any], **kwargs: Any) -> None:
+    def on_chain_end(self, outputs: Union[dict[str, Any], Any], **kwargs: Any) -> None:
         """Run when traced chain group ends.

         Args:
@@ -1716,11 +1710,11 @@ class AsyncCallbackManager(BaseCallbackManager):

     async def on_llm_start(
         self,
-        serialized: Dict[str, Any],
-        prompts: List[str],
+        serialized: dict[str, Any],
+        prompts: list[str],
         run_id: Optional[UUID] = None,
         **kwargs: Any,
-    ) -> List[AsyncCallbackManagerForLLMRun]:
+    ) -> list[AsyncCallbackManagerForLLMRun]:
         """Run when LLM starts running.

         Args:
@@ -1779,11 +1773,11 @@ class AsyncCallbackManager(BaseCallbackManager):

     async def on_chat_model_start(
         self,
-        serialized: Dict[str, Any],
-        messages: List[List[BaseMessage]],
+        serialized: dict[str, Any],
+        messages: list[list[BaseMessage]],
         run_id: Optional[UUID] = None,
         **kwargs: Any,
-    ) -> List[AsyncCallbackManagerForLLMRun]:
+    ) -> list[AsyncCallbackManagerForLLMRun]:
         """Async run when LLM starts running.

         Args:
@@ -1840,8 +1834,8 @@ class AsyncCallbackManager(BaseCallbackManager):

     async def on_chain_start(
         self,
-        serialized: Optional[Dict[str, Any]],
-        inputs: Union[Dict[str, Any], Any],
+        serialized: Optional[dict[str, Any]],
+        inputs: Union[dict[str, Any], Any],
         run_id: Optional[UUID] = None,
         **kwargs: Any,
     ) -> AsyncCallbackManagerForChainRun:
@@ -1886,7 +1880,7 @@ class AsyncCallbackManager(BaseCallbackManager):

     async def on_tool_start(
         self,
-        serialized: Optional[Dict[str, Any]],
+        serialized: Optional[dict[str, Any]],
         input_str: str,
         run_id: Optional[UUID] = None,
         parent_run_id: Optional[UUID] = None,
@@ -1975,7 +1969,7 @@ class AsyncCallbackManager(BaseCallbackManager):

     async def on_retriever_start(
         self,
-        serialized: Optional[Dict[str, Any]],
+        serialized: Optional[dict[str, Any]],
         query: str,
         run_id: Optional[UUID] = None,
         parent_run_id: Optional[UUID] = None,
@@ -2027,10 +2021,10 @@ class AsyncCallbackManager(BaseCallbackManager):
         inheritable_callbacks: Callbacks = None,
         local_callbacks: Callbacks = None,
         verbose: bool = False,
-        inheritable_tags: Optional[List[str]] = None,
-        local_tags: Optional[List[str]] = None,
-        inheritable_metadata: Optional[Dict[str, Any]] = None,
-        local_metadata: Optional[Dict[str, Any]] = None,
+        inheritable_tags: Optional[list[str]] = None,
+        local_tags: Optional[list[str]] = None,
+        inheritable_metadata: Optional[dict[str, Any]] = None,
+        local_metadata: Optional[dict[str, Any]] = None,
     ) -> AsyncCallbackManager:
         """Configure the async callback manager.

@@ -2069,8 +2063,8 @@ class AsyncCallbackManagerForChainGroup(AsyncCallbackManager):

     def __init__(
         self,
-        handlers: List[BaseCallbackHandler],
-        inheritable_handlers: Optional[List[BaseCallbackHandler]] = None,
+        handlers: list[BaseCallbackHandler],
+        inheritable_handlers: Optional[list[BaseCallbackHandler]] = None,
         parent_run_id: Optional[UUID] = None,
         *,
         parent_run_manager: AsyncCallbackManagerForChainRun,
@@ -2169,7 +2163,7 @@ class AsyncCallbackManagerForChainGroup(AsyncCallbackManager):
         return manager

     async def on_chain_end(
-        self, outputs: Union[Dict[str, Any], Any], **kwargs: Any
+        self, outputs: Union[dict[str, Any], Any], **kwargs: Any
     ) -> None:
         """Run when traced chain group ends.

@@ -2202,14 +2196,14 @@ H = TypeVar("H", bound=BaseCallbackHandler, covariant=True)


 def _configure(
-    callback_manager_cls: Type[T],
+    callback_manager_cls: type[T],
     inheritable_callbacks: Callbacks = None,
    local_callbacks: Callbacks = None,
     verbose: bool = False,
-    inheritable_tags: Optional[List[str]] = None,
-    local_tags: Optional[List[str]] = None,
-    inheritable_metadata: Optional[Dict[str, Any]] = None,
-    local_metadata: Optional[Dict[str, Any]] = None,
+    inheritable_tags: Optional[list[str]] = None,
+    local_tags: Optional[list[str]] = None,
+    inheritable_metadata: Optional[dict[str, Any]] = None,
+    local_metadata: Optional[dict[str, Any]] = None,
 ) -> T:
     """Configure the callback manager.

@@ -2354,7 +2348,7 @@ def _configure(
             and handler_class is not None
         )
         if var.get() is not None or create_one:
-            var_handler = var.get() or cast(Type[BaseCallbackHandler], handler_class)()
+            var_handler = var.get() or cast(type[BaseCallbackHandler], handler_class)()
             if handler_class is None:
                 if not any(
                     handler is var_handler  # direct pointer comparison
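Alongside the builtin-generics swap, this file's import hunk moves `AsyncGenerator`, `Coroutine`, `Generator`, and `Sequence` from `typing` to `collections.abc`, where the runtime classes live (the `typing` aliases have been deprecated since Python 3.9). A hedged, stdlib-only sketch of the resulting style, loosely mirroring the `trace_as_chain_group` signature above (the names here are illustrative, not the library's code):

```python
from collections.abc import Generator, Sequence
from contextlib import contextmanager
from typing import Any, Optional

@contextmanager
def trace_group(
    tags: Optional[list[str]] = None,
    metadata: Optional[dict[str, Any]] = None,
) -> Generator[dict[str, Any], None, None]:
    # Yield a mutable "run" record for the duration of the group.
    run: dict[str, Any] = {"tags": tags or [], "metadata": metadata or {}}
    yield run

def first(items: Sequence[str]) -> str:
    # Sequence accepts lists and tuples alike.
    return items[0]

with trace_group(tags=["demo"]) as run:
    run["metadata"]["ok"] = True
print(first(["a", "b"]), run)
```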
@@ -2,7 +2,7 @@

 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Dict, Optional
+from typing import TYPE_CHECKING, Any, Optional

 from langchain_core.callbacks.base import BaseCallbackHandler
 from langchain_core.utils import print_text
@@ -23,7 +23,7 @@ class StdOutCallbackHandler(BaseCallbackHandler):
         self.color = color

     def on_chain_start(
-        self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
+        self, serialized: dict[str, Any], inputs: dict[str, Any], **kwargs: Any
     ) -> None:
         """Print out that we are entering a chain.

@@ -35,7 +35,7 @@ class StdOutCallbackHandler(BaseCallbackHandler):
         class_name = serialized.get("name", serialized.get("id", ["<unknown>"])[-1])
         print(f"\n\n\033[1m> Entering new {class_name} chain...\033[0m")  # noqa: T201

-    def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
+    def on_chain_end(self, outputs: dict[str, Any], **kwargs: Any) -> None:
         """Print out that we finished a chain.

         Args:
@@ -3,7 +3,7 @@
 from __future__ import annotations

 import sys
-from typing import TYPE_CHECKING, Any, Dict, List
+from typing import TYPE_CHECKING, Any

 from langchain_core.callbacks.base import BaseCallbackHandler

@@ -17,7 +17,7 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
     """Callback handler for streaming. Only works with LLMs that support streaming."""

     def on_llm_start(
-        self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
+        self, serialized: dict[str, Any], prompts: list[str], **kwargs: Any
     ) -> None:
         """Run when LLM starts running.

@@ -29,8 +29,8 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):

     def on_chat_model_start(
         self,
-        serialized: Dict[str, Any],
-        messages: List[List[BaseMessage]],
+        serialized: dict[str, Any],
+        messages: list[list[BaseMessage]],
         **kwargs: Any,
     ) -> None:
         """Run when LLM starts running.
@@ -68,7 +68,7 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
     """

     def on_chain_start(
-        self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
+        self, serialized: dict[str, Any], inputs: dict[str, Any], **kwargs: Any
     ) -> None:
         """Run when a chain starts running.

@@ -78,7 +78,7 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
             **kwargs (Any): Additional keyword arguments.
         """

-    def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
+    def on_chain_end(self, outputs: dict[str, Any], **kwargs: Any) -> None:
         """Run when a chain ends running.

         Args:
@@ -95,7 +95,7 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
     """

     def on_tool_start(
-        self, serialized: Dict[str, Any], input_str: str, **kwargs: Any
+        self, serialized: dict[str, Any], input_str: str, **kwargs: Any
     ) -> None:
         """Run when the tool starts running.
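The handler methods above only change their annotations, so user-defined subclasses keep working unchanged. A hedged sketch (it assumes `langchain-core` is installed, and `CollectingHandler` is a made-up name) of a custom streaming handler written against the updated signatures:

```python
from typing import Any

from langchain_core.callbacks.base import BaseCallbackHandler

class CollectingHandler(BaseCallbackHandler):
    """Collects streamed tokens in memory instead of writing them to stdout."""

    def __init__(self) -> None:
        self.tokens: list[str] = []

    def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
        self.tokens.append(token)

handler = CollectingHandler()
handler.on_llm_new_token("Hello")
handler.on_llm_new_token(", world")
print("".join(handler.tokens))  # Hello, world
```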
@@ -18,7 +18,8 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from typing import List, Sequence, Union
+from collections.abc import Sequence
+from typing import Union

 from pydantic import BaseModel, Field

@@ -87,7 +88,7 @@ class BaseChatMessageHistory(ABC):
                     f.write("[]")
     """

-    messages: List[BaseMessage]
+    messages: list[BaseMessage]
     """A property or attribute that returns a list of messages.

     In general, getting the messages may involve IO to the underlying
@@ -95,7 +96,7 @@ class BaseChatMessageHistory(ABC):
     latency.
     """

-    async def aget_messages(self) -> List[BaseMessage]:
+    async def aget_messages(self) -> list[BaseMessage]:
         """Async version of getting messages.

         Can over-ride this method to provide an efficient async implementation.
@@ -204,10 +205,10 @@ class InMemoryChatMessageHistory(BaseChatMessageHistory, BaseModel):
     Stores messages in a memory list.
     """

-    messages: List[BaseMessage] = Field(default_factory=list)
+    messages: list[BaseMessage] = Field(default_factory=list)
    """A list of messages stored in memory."""

-    async def aget_messages(self) -> List[BaseMessage]:
+    async def aget_messages(self) -> list[BaseMessage]:
         """Async version of getting messages.

         Can over-ride this method to provide an efficient async implementation.
@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from typing import Iterator, List
+from collections.abc import Iterator

 from langchain_core.chat_sessions import ChatSession

@@ -15,7 +15,7 @@ class BaseChatLoader(ABC):
             An iterator of chat sessions.
         """

-    def load(self) -> List[ChatSession]:
+    def load(self) -> list[ChatSession]:
         """Eagerly load the chat sessions into memory.

         Returns:
@@ -1,6 +1,7 @@
 """**Chat Sessions** are a collection of messages and function calls."""

-from typing import Sequence, TypedDict
+from collections.abc import Sequence
+from typing import TypedDict

 from langchain_core.messages import BaseMessage
@@ -3,7 +3,8 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, AsyncIterator, Iterator, List, Optional
+from collections.abc import AsyncIterator, Iterator
+from typing import TYPE_CHECKING, Optional

 from langchain_core.documents import Document
 from langchain_core.runnables import run_in_executor
@@ -25,17 +26,17 @@ class BaseLoader(ABC):  # noqa: B024

     # Sub-classes should not implement this method directly. Instead, they
     # should implement the lazy load method.
-    def load(self) -> List[Document]:
+    def load(self) -> list[Document]:
         """Load data into Document objects."""
         return list(self.lazy_load())

-    async def aload(self) -> List[Document]:
+    async def aload(self) -> list[Document]:
         """Load data into Document objects."""
         return [document async for document in self.alazy_load()]

     def load_and_split(
         self, text_splitter: Optional[TextSplitter] = None
-    ) -> List[Document]:
+    ) -> list[Document]:
         """Load Documents and split into chunks. Chunks are returned as Documents.

         Do not override this method. It should be considered to be deprecated!
@@ -108,7 +109,7 @@ class BaseBlobParser(ABC):
             Generator of documents
         """

-    def parse(self, blob: Blob) -> List[Document]:
+    def parse(self, blob: Blob) -> list[Document]:
         """Eagerly parse the blob into a document or documents.

         This is a convenience method for interactive development environment.
@@ -8,7 +8,7 @@ In addition, content loading code should provide a lazy loading interface by def
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from typing import Iterable
+from collections.abc import Iterable

 # Re-export Blob and PathLike for backwards compatibility
 from langchain_core.documents.base import Blob as Blob
@@ -1,7 +1,8 @@
 import datetime
 import json
 import uuid
-from typing import Any, Callable, Iterator, Optional, Sequence, Union
+from collections.abc import Iterator, Sequence
+from typing import Any, Callable, Optional, Union

 from langsmith import Client as LangSmithClient
@@ -2,9 +2,10 @@ from __future__ import annotations

 import contextlib
 import mimetypes
+from collections.abc import Generator
 from io import BufferedReader, BytesIO
 from pathlib import PurePath
-from typing import Any, Dict, Generator, List, Literal, Optional, Union, cast
+from typing import Any, Literal, Optional, Union, cast

 from pydantic import ConfigDict, Field, field_validator, model_validator

@@ -138,7 +139,7 @@ class Blob(BaseMedia):

     @model_validator(mode="before")
     @classmethod
-    def check_blob_is_valid(cls, values: Dict[str, Any]) -> Any:
+    def check_blob_is_valid(cls, values: dict[str, Any]) -> Any:
         """Verify that either data or path is provided."""
         if "data" not in values and "path" not in values:
             raise ValueError("Either data or path must be provided")
@@ -285,7 +286,7 @@ class Document(BaseMedia):
         return True

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object."""
         return ["langchain", "schema", "document"]
@@ -1,7 +1,8 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from typing import Optional, Sequence
+from collections.abc import Sequence
+from typing import Optional

 from pydantic import BaseModel
@@ -1,7 +1,8 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any

 from langchain_core.runnables.config import run_in_executor
@@ -1,7 +1,6 @@
 """**Embeddings** interface."""

 from abc import ABC, abstractmethod
-from typing import List

 from langchain_core.runnables.config import run_in_executor

@@ -35,7 +34,7 @@ class Embeddings(ABC):
     """

     @abstractmethod
-    def embed_documents(self, texts: List[str]) -> List[List[float]]:
+    def embed_documents(self, texts: list[str]) -> list[list[float]]:
         """Embed search docs.

         Args:
@@ -46,7 +45,7 @@ class Embeddings(ABC):
         """

     @abstractmethod
-    def embed_query(self, text: str) -> List[float]:
+    def embed_query(self, text: str) -> list[float]:
         """Embed query text.

         Args:
@@ -56,7 +55,7 @@ class Embeddings(ABC):
             Embedding.
         """

-    async def aembed_documents(self, texts: List[str]) -> List[List[float]]:
+    async def aembed_documents(self, texts: list[str]) -> list[list[float]]:
         """Asynchronous Embed search docs.

         Args:
@@ -67,7 +66,7 @@ class Embeddings(ABC):
         """
         return await run_in_executor(None, self.embed_documents, texts)

-    async def aembed_query(self, text: str) -> List[float]:
+    async def aembed_query(self, text: str) -> list[float]:
         """Asynchronous Embed query text.

         Args:
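The `Embeddings` interface above now advertises `list[str] -> list[list[float]]`; implementers only need the two sync methods, since the async variants fall back to `run_in_executor`. A minimal sketch (it assumes `langchain-core` is installed, and the hash-derived vectors are an illustrative stand-in for a real embedding model):

```python
import hashlib

from langchain_core.embeddings import Embeddings

class ToyEmbeddings(Embeddings):
    """Deterministic toy vectors derived from a SHA-256 digest."""

    def __init__(self, size: int = 8) -> None:
        self.size = size

    def _vec(self, text: str) -> list[float]:
        digest = hashlib.sha256(text.encode("utf-8")).digest()
        return [byte / 255 for byte in digest[: self.size]]

    def embed_documents(self, texts: list[str]) -> list[list[float]]:
        return [self._vec(text) for text in texts]

    def embed_query(self, text: str) -> list[float]:
        return self._vec(text)

emb = ToyEmbeddings(size=4)
print(emb.embed_query("hello"))
```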
@@ -2,7 +2,6 @@

 # Please do not add additional fake embedding model implementations here.
 import hashlib
-from typing import List

 from pydantic import BaseModel

@@ -51,15 +50,15 @@ class FakeEmbeddings(Embeddings, BaseModel):
     size: int
     """The size of the embedding vector."""

-    def _get_embedding(self) -> List[float]:
+    def _get_embedding(self) -> list[float]:
         import numpy as np  # type: ignore[import-not-found, import-untyped]

         return list(np.random.normal(size=self.size))

-    def embed_documents(self, texts: List[str]) -> List[List[float]]:
+    def embed_documents(self, texts: list[str]) -> list[list[float]]:
         return [self._get_embedding() for _ in texts]

-    def embed_query(self, text: str) -> List[float]:
+    def embed_query(self, text: str) -> list[float]:
         return self._get_embedding()


@@ -106,7 +105,7 @@ class DeterministicFakeEmbedding(Embeddings, BaseModel):
     size: int
     """The size of the embedding vector."""

-    def _get_embedding(self, seed: int) -> List[float]:
+    def _get_embedding(self, seed: int) -> list[float]:
         import numpy as np  # type: ignore[import-not-found, import-untyped]

         # set the seed for the random generator
@@ -117,8 +116,8 @@ class DeterministicFakeEmbedding(Embeddings, BaseModel):
         """Get a seed for the random generator, using the hash of the text."""
         return int(hashlib.sha256(text.encode("utf-8")).hexdigest(), 16) % 10**8

-    def embed_documents(self, texts: List[str]) -> List[List[float]]:
+    def embed_documents(self, texts: list[str]) -> list[list[float]]:
         return [self._get_embedding(seed=self._get_seed(_)) for _ in texts]

-    def embed_query(self, text: str) -> List[float]:
+    def embed_query(self, text: str) -> list[float]:
         return self._get_embedding(seed=self._get_seed(text))
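`DeterministicFakeEmbedding` above derives a stable RNG seed from the text via `sha256 -> int -> % 10**8`. A stdlib-only sketch of the same trick (using `random.Random` where the library uses numpy; the function names are illustrative):

```python
import hashlib
import random

def seed_for(text: str) -> int:
    # sha256 hex digest -> big integer -> bounded seed, the reduction used above.
    return int(hashlib.sha256(text.encode("utf-8")).hexdigest(), 16) % 10**8

def fake_embedding(text: str, size: int = 4) -> list[float]:
    rng = random.Random(seed_for(text))
    return [rng.gauss(0, 1) for _ in range(size)]

# Same text always yields the same vector; different text almost surely differs.
assert fake_embedding("same text") == fake_embedding("same text")
assert fake_embedding("same text") != fake_embedding("other text")
print(fake_embedding("same text"))
```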
@@ -1,7 +1,7 @@
 """Interface for selecting examples to include in prompts."""

 from abc import ABC, abstractmethod
-from typing import Any, Dict, List
+from typing import Any

 from langchain_core.runnables import run_in_executor

@@ -10,14 +10,14 @@ class BaseExampleSelector(ABC):
     """Interface for selecting examples to include in prompts."""

     @abstractmethod
-    def add_example(self, example: Dict[str, str]) -> Any:
+    def add_example(self, example: dict[str, str]) -> Any:
         """Add new example to store.

         Args:
             example: A dictionary with keys as input variables
                 and values as their values."""

-    async def aadd_example(self, example: Dict[str, str]) -> Any:
+    async def aadd_example(self, example: dict[str, str]) -> Any:
         """Async add new example to store.

         Args:
@@ -27,14 +27,14 @@ class BaseExampleSelector(ABC):
         return await run_in_executor(None, self.add_example, example)

     @abstractmethod
-    def select_examples(self, input_variables: Dict[str, str]) -> List[dict]:
+    def select_examples(self, input_variables: dict[str, str]) -> list[dict]:
         """Select which examples to use based on the inputs.

         Args:
             input_variables: A dictionary with keys as input variables
                 and values as their values."""

-    async def aselect_examples(self, input_variables: Dict[str, str]) -> List[dict]:
+    async def aselect_examples(self, input_variables: dict[str, str]) -> list[dict]:
         """Async select which examples to use based on the inputs.

         Args:
@@ -1,7 +1,7 @@
 """Select examples based on length."""

 import re
-from typing import Callable, Dict, List
+from typing import Callable

 from pydantic import BaseModel, Field, model_validator
 from typing_extensions import Self
@@ -17,7 +17,7 @@ def _get_length_based(text: str) -> int:
 class LengthBasedExampleSelector(BaseExampleSelector, BaseModel):
     """Select examples based on length."""

-    examples: List[dict]
+    examples: list[dict]
     """A list of the examples that the prompt template expects."""

     example_prompt: PromptTemplate
@@ -29,10 +29,10 @@ class LengthBasedExampleSelector(BaseExampleSelector, BaseModel):
     max_length: int = 2048
     """Max length for the prompt, beyond which examples are cut."""

-    example_text_lengths: List[int] = Field(default_factory=list)  # :meta private:
+    example_text_lengths: list[int] = Field(default_factory=list)  # :meta private:
     """Length of each example."""

-    def add_example(self, example: Dict[str, str]) -> None:
+    def add_example(self, example: dict[str, str]) -> None:
         """Add new example to list.

         Args:
@@ -43,7 +43,7 @@ class LengthBasedExampleSelector(BaseExampleSelector, BaseModel):
         string_example = self.example_prompt.format(**example)
         self.example_text_lengths.append(self.get_text_length(string_example))

-    async def aadd_example(self, example: Dict[str, str]) -> None:
+    async def aadd_example(self, example: dict[str, str]) -> None:
         """Async add new example to list.

         Args:
@@ -62,7 +62,7 @@ class LengthBasedExampleSelector(BaseExampleSelector, BaseModel):
         self.example_text_lengths = [self.get_text_length(eg) for eg in string_examples]
         return self

-    def select_examples(self, input_variables: Dict[str, str]) -> List[dict]:
+    def select_examples(self, input_variables: dict[str, str]) -> list[dict]:
         """Select which examples to use based on the input lengths.

         Args:
@@ -86,7 +86,7 @@ class LengthBasedExampleSelector(BaseExampleSelector, BaseModel):
             i += 1
         return examples

-    async def aselect_examples(self, input_variables: Dict[str, str]) -> List[dict]:
+    async def aselect_examples(self, input_variables: dict[str, str]) -> list[dict]:
         """Async select which examples to use based on the input lengths.

         Args:
@@ -3,7 +3,7 @@
 from __future__ import annotations

 from abc import ABC
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type
+from typing import TYPE_CHECKING, Any, Optional

 from pydantic import BaseModel, ConfigDict

@@ -15,7 +15,7 @@ if TYPE_CHECKING:
     from langchain_core.embeddings import Embeddings


-def sorted_values(values: Dict[str, str]) -> List[Any]:
+def sorted_values(values: dict[str, str]) -> list[Any]:
     """Return a list of values in dict sorted by key.

     Args:
@@ -35,12 +35,12 @@ class _VectorStoreExampleSelector(BaseExampleSelector, BaseModel, ABC):
     """VectorStore that contains information about examples."""
     k: int = 4
     """Number of examples to select."""
-    example_keys: Optional[List[str]] = None
+    example_keys: Optional[list[str]] = None
     """Optional keys to filter examples to."""
-    input_keys: Optional[List[str]] = None
+    input_keys: Optional[list[str]] = None
     """Optional keys to filter input to. If provided, the search is based on
     the input variables instead of all variables."""
-    vectorstore_kwargs: Optional[Dict[str, Any]] = None
+    vectorstore_kwargs: Optional[dict[str, Any]] = None
     """Extra arguments passed to similarity_search function of the vectorstore."""

     model_config = ConfigDict(
@@ -50,14 +50,14 @@ class _VectorStoreExampleSelector(BaseExampleSelector, BaseModel, ABC):

     @staticmethod
     def _example_to_text(
-        example: Dict[str, str], input_keys: Optional[List[str]]
+        example: dict[str, str], input_keys: Optional[list[str]]
     ) -> str:
         if input_keys:
             return " ".join(sorted_values({key: example[key] for key in input_keys}))
         else:
             return " ".join(sorted_values(example))

-    def _documents_to_examples(self, documents: List[Document]) -> List[dict]:
+    def _documents_to_examples(self, documents: list[Document]) -> list[dict]:
         # Get the examples from the metadata.
         # This assumes that examples are stored in metadata.
         examples = [dict(e.metadata) for e in documents]
@@ -66,7 +66,7 @@ class _VectorStoreExampleSelector(BaseExampleSelector, BaseModel, ABC):
             examples = [{k: eg[k] for k in self.example_keys} for eg in examples]
         return examples

-    def add_example(self, example: Dict[str, str]) -> str:
+    def add_example(self, example: dict[str, str]) -> str:
         """Add a new example to vectorstore.

         Args:
@@ -81,7 +81,7 @@ class _VectorStoreExampleSelector(BaseExampleSelector, BaseModel, ABC):
         )
         return ids[0]

-    async def aadd_example(self, example: Dict[str, str]) -> str:
+    async def aadd_example(self, example: dict[str, str]) -> str:
         """Async add new example to vectorstore.

         Args:
@@ -100,7 +100,7 @@ class _VectorStoreExampleSelector(BaseExampleSelector, BaseModel, ABC):
 class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector):
     """Select examples based on semantic similarity."""

-    def select_examples(self, input_variables: Dict[str, str]) -> List[dict]:
+    def select_examples(self, input_variables: dict[str, str]) -> list[dict]:
         """Select examples based on semantic similarity.

         Args:
@@ -118,7 +118,7 @@ class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector):
         )
         return self._documents_to_examples(example_docs)

-    async def aselect_examples(self, input_variables: Dict[str, str]) -> List[dict]:
+    async def aselect_examples(self, input_variables: dict[str, str]) -> list[dict]:
         """Asynchronously select examples based on semantic similarity.

         Args:
@@ -139,13 +139,13 @@ class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector):
     @classmethod
     def from_examples(
         cls,
-        examples: List[dict],
+        examples: list[dict],
         embeddings: Embeddings,
-        vectorstore_cls: Type[VectorStore],
+        vectorstore_cls: type[VectorStore],
         k: int = 4,
-        input_keys: Optional[List[str]] = None,
+        input_keys: Optional[list[str]] = None,
         *,
-        example_keys: Optional[List[str]] = None,
+        example_keys: Optional[list[str]] = None,
         vectorstore_kwargs: Optional[dict] = None,
         **vectorstore_cls_kwargs: Any,
     ) -> SemanticSimilarityExampleSelector:
@@ -183,13 +183,13 @@ class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector):
     @classmethod
     async def afrom_examples(
         cls,
-        examples: List[dict],
+        examples: list[dict],
         embeddings: Embeddings,
-        vectorstore_cls: Type[VectorStore],
+        vectorstore_cls: type[VectorStore],
         k: int = 4,
-        input_keys: Optional[List[str]] = None,
+        input_keys: Optional[list[str]] = None,
         *,
-        example_keys: Optional[List[str]] = None,
+        example_keys: Optional[list[str]] = None,
         vectorstore_kwargs: Optional[dict] = None,
         **vectorstore_cls_kwargs: Any,
     ) -> SemanticSimilarityExampleSelector:
@@ -235,7 +235,7 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector):
     fetch_k: int = 20
     """Number of examples to fetch to rerank."""

-    def select_examples(self, input_variables: Dict[str, str]) -> List[dict]:
+    def select_examples(self, input_variables: dict[str, str]) -> list[dict]:
         """Select examples based on Max Marginal Relevance.

         Args:
@@ -251,7 +251,7 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector):
         )
         return self._documents_to_examples(example_docs)

-    async def aselect_examples(self, input_variables: Dict[str, str]) -> List[dict]:
+    async def aselect_examples(self, input_variables: dict[str, str]) -> list[dict]:
         """Asynchronously select examples based on Max Marginal Relevance.

         Args:
@@ -270,13 +270,13 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector):
     @classmethod
     def from_examples(
         cls,
-        examples: List[dict],
+        examples: list[dict],
         embeddings: Embeddings,
-        vectorstore_cls: Type[VectorStore],
+        vectorstore_cls: type[VectorStore],
         k: int = 4,
-        input_keys: Optional[List[str]] = None,
+        input_keys: Optional[list[str]] = None,
         fetch_k: int = 20,
-        example_keys: Optional[List[str]] = None,
+        example_keys: Optional[list[str]] = None,
         vectorstore_kwargs: Optional[dict] = None,
         **vectorstore_cls_kwargs: Any,
     ) -> MaxMarginalRelevanceExampleSelector:
@@ -317,14 +317,14 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector):
     @classmethod
     async def afrom_examples(
         cls,
-        examples: List[dict],
+        examples: list[dict],
         embeddings: Embeddings,
-        vectorstore_cls: Type[VectorStore],
+        vectorstore_cls: type[VectorStore],
         *,
         k: int = 4,
-        input_keys: Optional[List[str]] = None,
+        input_keys: Optional[list[str]] = None,
         fetch_k: int = 20,
-        example_keys: Optional[List[str]] = None,
+        example_keys: Optional[list[str]] = None,
         vectorstore_kwargs: Optional[dict] = None,
         **vectorstore_cls_kwargs: Any,
     ) -> MaxMarginalRelevanceExampleSelector:
@@ -1,14 +1,10 @@
 from __future__ import annotations

 from abc import abstractmethod
+from collections.abc import AsyncIterable, Collection, Iterable, Iterator
 from typing import (
     Any,
-    AsyncIterable,
     ClassVar,
-    Collection,
-    Iterable,
-    Iterator,
-    List,
     Optional,
 )

@@ -68,7 +64,7 @@ class Node(Serializable):
     """Text contained by the node."""
     metadata: dict = Field(default_factory=dict)
     """Metadata for the node."""
-    links: List[Link] = Field(default_factory=list)
+    links: list[Link] = Field(default_factory=list)
     """Links associated with the node."""


@@ -189,7 +185,7 @@ class GraphVectorStore(VectorStore):
         *,
         ids: Optional[Iterable[str]] = None,
         **kwargs: Any,
-    ) -> List[str]:
+    ) -> list[str]:
         """Run more texts through the embeddings and add to the vectorstore.

         The Links present in the metadata field `links` will be extracted to create
@@ -237,7 +233,7 @@ class GraphVectorStore(VectorStore):
         *,
         ids: Optional[Iterable[str]] = None,
         **kwargs: Any,
-    ) -> List[str]:
+    ) -> list[str]:
         """Run more texts through the embeddings and add to the vectorstore.

         The Links present in the metadata field `links` will be extracted to create
@@ -282,7 +278,7 @@ class GraphVectorStore(VectorStore):
         self,
         documents: Iterable[Document],
         **kwargs: Any,
-    ) -> List[str]:
+    ) -> list[str]:
         """Run more documents through the embeddings and add to the vectorstore.

         The Links present in the document metadata field `links` will be extracted to
@@ -332,7 +328,7 @@ class GraphVectorStore(VectorStore):
         self,
         documents: Iterable[Document],
         **kwargs: Any,
-    ) -> List[str]:
+    ) -> list[str]:
         """Run more documents through the embeddings and add to the vectorstore.

         The Links present in the document metadata field `links` will be extracted to
@@ -535,7 +531,7 @@ class GraphVectorStore(VectorStore):

     def similarity_search(
         self, query: str, k: int = 4, **kwargs: Any
-    ) -> List[Document]:
+    ) -> list[Document]:
         return list(self.traversal_search(query, k=k, depth=0))

     def max_marginal_relevance_search(
@@ -545,7 +541,7 @@ class GraphVectorStore(VectorStore):
         fetch_k: int = 20,
         lambda_mult: float = 0.5,
         **kwargs: Any,
-    ) -> List[Document]:
+    ) -> list[Document]:
         return list(
             self.mmr_traversal_search(
                 query, k=k, fetch_k=fetch_k, lambda_mult=lambda_mult, depth=0
@@ -554,10 +550,10 @@ class GraphVectorStore(VectorStore):

     async def asimilarity_search(
         self, query: str, k: int = 4, **kwargs: Any
-    ) -> List[Document]:
+    ) -> list[Document]:
         return [doc async for doc in self.atraversal_search(query, k=k, depth=0)]

-    def search(self, query: str, search_type: str, **kwargs: Any) -> List[Document]:
+    def search(self, query: str, search_type: str, **kwargs: Any) -> list[Document]:
         if search_type == "similarity":
             return self.similarity_search(query, **kwargs)
         elif search_type == "similarity_score_threshold":
@@ -580,7 +576,7 @@ class GraphVectorStore(VectorStore):

     async def asearch(
         self, query: str, search_type: str, **kwargs: Any
-    ) -> List[Document]:
+    ) -> list[Document]:
         if search_type == "similarity":
             return await self.asimilarity_search(query, **kwargs)
         elif search_type == "similarity_score_threshold":
@@ -679,7 +675,7 @@ class GraphVectorStoreRetriever(VectorStoreRetriever):

     def _get_relevant_documents(
         self, query: str, *, run_manager: CallbackManagerForRetrieverRun
-    ) -> List[Document]:
+    ) -> list[Document]:
         if self.search_type == "traversal":
             return list(self.vectorstore.traversal_search(query, **self.search_kwargs))
         elif self.search_type == "mmr_traversal":
@@ -691,7 +687,7 @@ class GraphVectorStoreRetriever(VectorStoreRetriever):

     async def _aget_relevant_documents(
         self, query: str, *, run_manager: AsyncCallbackManagerForRetrieverRun
-    ) -> List[Document]:
+    ) -> list[Document]:
         if self.search_type == "traversal":
             return [
                 doc
@@ -1,5 +1,6 @@
+from collections.abc import Iterable
 from dataclasses import dataclass
-from typing import Iterable, List, Literal, Union
+from typing import Literal, Union

 from langchain_core._api import beta
 from langchain_core.documents import Document
@@ -41,7 +42,7 @@ METADATA_LINKS_KEY = "links"


 @beta()
-def get_links(doc: Document) -> List[Link]:
+def get_links(doc: Document) -> list[Link]:
     """Get the links from a document.

     Args:
@@ -5,20 +5,13 @@ from __future__ import annotations
 import hashlib
 import json
 import uuid
+from collections.abc import AsyncIterable, AsyncIterator, Iterable, Iterator, Sequence
 from itertools import islice
 from typing import (
     Any,
-    AsyncIterable,
-    AsyncIterator,
     Callable,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
     Literal,
     Optional,
-    Sequence,
-    Set,
     TypedDict,
     TypeVar,
     Union,
@@ -71,7 +64,7 @@ class _HashedDocument(Document):

     @model_validator(mode="before")
     @classmethod
-    def calculate_hashes(cls, values: Dict[str, Any]) -> Any:
+    def calculate_hashes(cls, values: dict[str, Any]) -> Any:
         """Root validator to calculate content and metadata hash."""
         content = values.get("page_content", "")
         metadata = values.get("metadata", {})
@@ -125,7 +118,7 @@ class _HashedDocument(Document):
     )


-def _batch(size: int, iterable: Iterable[T]) -> Iterator[List[T]]:
+def _batch(size: int, iterable: Iterable[T]) -> Iterator[list[T]]:
     """Utility batching function."""
     it = iter(iterable)
     while True:
@@ -135,9 +128,9 @@ def _batch(size: int, iterable: Iterable[T]) -> Iterator[list[T]]:
         yield chunk


-async def _abatch(size: int, iterable: AsyncIterable[T]) -> AsyncIterator[List[T]]:
+async def _abatch(size: int, iterable: AsyncIterable[T]) -> AsyncIterator[list[T]]:
     """Utility batching function."""
-    batch: List[T] = []
+    batch: list[T] = []
     async for element in iterable:
         if len(batch) < size:
             batch.append(element)
@@ -171,7 +164,7 @@ def _deduplicate_in_order(
     hashed_documents: Iterable[_HashedDocument],
 ) -> Iterator[_HashedDocument]:
     """Deduplicate a list of hashed documents while preserving order."""
-    seen: Set[str] = set()
+    seen: set[str] = set()

     for hashed_doc in hashed_documents:
         if hashed_doc.hash_ not in seen:
@@ -349,7 +342,7 @@ def index(
     uids = []
     docs_to_index = []
     uids_to_refresh = []
-    seen_docs: Set[str] = set()
+    seen_docs: set[str] = set()
    for hashed_doc, doc_exists in zip(hashed_docs, exists_batch):
         if doc_exists:
             if force_update:
@@ -589,7 +582,7 @@ async def aindex(
     uids: list[str] = []
     docs_to_index: list[Document] = []
     uids_to_refresh = []
-    seen_docs: Set[str] = set()
+    seen_docs: set[str] = set()
     for hashed_doc, doc_exists in zip(hashed_docs, exists_batch):
         if doc_exists:
             if force_update:
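`_batch` above chunks any iterable with `itertools.islice`, yielding `list[T]` batches until the source runs dry. A standalone sketch of the same pattern (equivalent logic under illustrative names):

```python
from collections.abc import Iterable, Iterator
from itertools import islice

def batch(size: int, iterable: Iterable[int]) -> Iterator[list[int]]:
    it = iter(iterable)
    while True:
        chunk = list(islice(it, size))  # take up to `size` items
        if not chunk:
            return
        yield chunk

print(list(batch(3, range(8))))  # [[0, 1, 2], [3, 4, 5], [6, 7]]
```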
@@ -3,7 +3,8 @@ from __future__ import annotations
 import abc
 import time
 from abc import ABC, abstractmethod
-from typing import Any, Dict, List, Optional, Sequence, TypedDict
+from collections.abc import Sequence
+from typing import Any, Optional, TypedDict

 from langchain_core._api import beta
 from langchain_core.documents import Document
@@ -144,7 +145,7 @@ class RecordManager(ABC):
         """

     @abstractmethod
-    def exists(self, keys: Sequence[str]) -> List[bool]:
+    def exists(self, keys: Sequence[str]) -> list[bool]:
         """Check if the provided keys exist in the database.

         Args:
@@ -155,7 +156,7 @@ class RecordManager(ABC):
         """

     @abstractmethod
-    async def aexists(self, keys: Sequence[str]) -> List[bool]:
+    async def aexists(self, keys: Sequence[str]) -> list[bool]:
         """Asynchronously check if the provided keys exist in the database.

         Args:
@@ -173,7 +174,7 @@ class RecordManager(ABC):
         after: Optional[float] = None,
         group_ids: Optional[Sequence[str]] = None,
         limit: Optional[int] = None,
-    ) -> List[str]:
+    ) -> list[str]:
         """List records in the database based on the provided filters.

         Args:
@@ -194,7 +195,7 @@ class RecordManager(ABC):
         after: Optional[float] = None,
         group_ids: Optional[Sequence[str]] = None,
         limit: Optional[int] = None,
-    ) -> List[str]:
+    ) -> list[str]:
         """Asynchronously list records in the database based on the provided filters.

         Args:
@@ -241,7 +242,7 @@ class InMemoryRecordManager(RecordManager):
         super().__init__(namespace)
         # Each key points to a dictionary
         # of {'group_id': group_id, 'updated_at': timestamp}
-        self.records: Dict[str, _Record] = {}
+        self.records: dict[str, _Record] = {}
         self.namespace = namespace

     def create_schema(self) -> None:
@@ -325,7 +326,7 @@ class InMemoryRecordManager(RecordManager):
         """
         self.update(keys, group_ids=group_ids, time_at_least=time_at_least)

-    def exists(self, keys: Sequence[str]) -> List[bool]:
+    def exists(self, keys: Sequence[str]) -> list[bool]:
         """Check if the provided keys exist in the database.

         Args:
@@ -336,7 +337,7 @@ class InMemoryRecordManager(RecordManager):
         """
         return [key in self.records for key in keys]

-    async def aexists(self, keys: Sequence[str]) -> List[bool]:
+    async def aexists(self, keys: Sequence[str]) -> list[bool]:
         """Async check if the provided keys exist in the database.

         Args:
@@ -354,7 +355,7 @@ class InMemoryRecordManager(RecordManager):
         after: Optional[float] = None,
         group_ids: Optional[Sequence[str]] = None,
         limit: Optional[int] = None,
-    ) -> List[str]:
+    ) -> list[str]:
         """List records in the database based on the provided filters.

         Args:
@@ -390,7 +391,7 @@ class InMemoryRecordManager(RecordManager):
         after: Optional[float] = None,
         group_ids: Optional[Sequence[str]] = None,
         limit: Optional[int] = None,
-    ) -> List[str]:
+    ) -> list[str]:
         """Async list records in the database based on the provided filters.

         Args:
@@ -449,9 +450,9 @@ class UpsertResponse(TypedDict):
     indexed to avoid this issue.
     """

-    succeeded: List[str]
+    succeeded: list[str]
     """The IDs that were successfully indexed."""
-    failed: List[str]
+    failed: list[str]
     """The IDs that failed to index."""


@@ -562,7 +563,7 @@ class DocumentIndex(BaseRetriever):
         )

     @abc.abstractmethod
-    def delete(self, ids: Optional[List[str]] = None, **kwargs: Any) -> DeleteResponse:
+    def delete(self, ids: Optional[list[str]] = None, **kwargs: Any) -> DeleteResponse:
         """Delete by IDs or other criteria.

         Calling delete without any input parameters should raise a ValueError!
@@ -579,7 +580,7 @@ class DocumentIndex(BaseRetriever):
         """

     async def adelete(
-        self, ids: Optional[List[str]] = None, **kwargs: Any
+        self, ids: Optional[list[str]] = None, **kwargs: Any
     ) -> DeleteResponse:
         """Delete by IDs or other criteria. Async variant.

@@ -607,7 +608,7 @@ class DocumentIndex(BaseRetriever):
         ids: Sequence[str],
         /,
         **kwargs: Any,
-    ) -> List[Document]:
+    ) -> list[Document]:
         """Get documents by id.

         Fewer documents may be returned than requested if some IDs are not found or
@@ -633,7 +634,7 @@ class DocumentIndex(BaseRetriever):
         ids: Sequence[str],
         /,
         **kwargs: Any,
-    ) -> List[Document]:
+    ) -> list[Document]:
         """Get documents by id.

         Fewer documents may be returned than requested if some IDs are not found or
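The `RecordManager` contract above (`exists`, `update`, `list_keys`, all now returning builtin `list` types) can be exercised with the in-memory implementation. A hedged sketch, assuming `langchain-core` is installed and that `InMemoryRecordManager` is importable from `langchain_core.indexing`:

```python
from langchain_core.indexing import InMemoryRecordManager

rm = InMemoryRecordManager(namespace="demo")
rm.create_schema()
rm.update(["doc-1", "doc-2"])

print(rm.exists(["doc-1", "doc-3"]))  # [True, False] -> a plain list[bool]
print(sorted(rm.list_keys()))         # ['doc-1', 'doc-2']
```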
@@ -1,5 +1,6 @@
 import uuid
-from typing import Any, Dict, List, Optional, Sequence, cast
+from collections.abc import Sequence
+from typing import Any, Optional, cast

 from pydantic import Field

@@ -22,7 +23,7 @@ class InMemoryDocumentIndex(DocumentIndex):
     .. versionadded:: 0.2.29
     """

-    store: Dict[str, Document] = Field(default_factory=dict)
+    store: dict[str, Document] = Field(default_factory=dict)
     top_k: int = 4

     def upsert(self, items: Sequence[Document], /, **kwargs: Any) -> UpsertResponse:
@@ -43,7 +44,7 @@ class InMemoryDocumentIndex(DocumentIndex):

         return UpsertResponse(succeeded=ok_ids, failed=[])

-    def delete(self, ids: Optional[List[str]] = None, **kwargs: Any) -> DeleteResponse:
+    def delete(self, ids: Optional[list[str]] = None, **kwargs: Any) -> DeleteResponse:
         """Delete by ID."""
         if ids is None:
             raise ValueError("IDs must be provided for deletion")
@@ -59,7 +60,7 @@ class InMemoryDocumentIndex(DocumentIndex):
             succeeded=ok_ids, num_deleted=len(ok_ids), num_failed=0, failed=[]
         )

-    def get(self, ids: Sequence[str], /, **kwargs: Any) -> List[Document]:
+    def get(self, ids: Sequence[str], /, **kwargs: Any) -> list[Document]:
         """Get by ids."""
         found_documents = []

@@ -71,7 +72,7 @@ class InMemoryDocumentIndex(DocumentIndex):

     def _get_relevant_documents(
         self, query: str, *, run_manager: CallbackManagerForRetrieverRun
-    ) -> List[Document]:
+    ) -> list[Document]:
         counts_by_doc = []

         for document in self.store.values():
@@ -1,19 +1,14 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from functools import lru_cache
+from collections.abc import Mapping, Sequence
+from functools import cache
 from typing import (
     TYPE_CHECKING,
     Any,
     Callable,
-    Dict,
-    List,
     Literal,
-    Mapping,
     Optional,
-    Sequence,
-    Set,
-    Type,
     TypeVar,
     Union,
 )
@@ -51,11 +46,11 @@ class LangSmithParams(TypedDict, total=False):
     """Temperature for generation."""
     ls_max_tokens: Optional[int]
     """Max tokens for generation."""
-    ls_stop: Optional[List[str]]
+    ls_stop: Optional[list[str]]
     """Stop words for generation."""


-@lru_cache(maxsize=None)  # Cache the tokenizer
+@cache  # Cache the tokenizer
 def get_tokenizer() -> Any:
     """Get a GPT-2 tokenizer instance.

@@ -74,7 +69,7 @@ def get_tokenizer() -> Any:
     return GPT2TokenizerFast.from_pretrained("gpt2")


-def _get_token_ids_default_method(text: str) -> List[int]:
+def _get_token_ids_default_method(text: str) -> list[int]:
     """Encode the text into token IDs."""
     # get the cached tokenizer
     tokenizer = get_tokenizer()
@@ -117,11 +112,11 @@ class BaseLanguageModel(
     """Whether to print out response text."""
     callbacks: Callbacks = Field(default=None, exclude=True)
     """Callbacks to add to the run trace."""
-    tags: Optional[List[str]] = Field(default=None, exclude=True)
+    tags: Optional[list[str]] = Field(default=None, exclude=True)
     """Tags to add to the run trace."""
-    metadata: Optional[Dict[str, Any]] = Field(default=None, exclude=True)
+    metadata: Optional[dict[str, Any]] = Field(default=None, exclude=True)
     """Metadata to add to the run trace."""
-    custom_get_token_ids: Optional[Callable[[str], List[int]]] = Field(
+    custom_get_token_ids: Optional[Callable[[str], list[int]]] = Field(
         default=None, exclude=True
     )
     """Optional encoder to use for counting tokens."""
@@ -161,14 +156,14 @@ class BaseLanguageModel(
         return Union[
             str,
             Union[StringPromptValue, ChatPromptValueConcrete],
-            List[AnyMessage],
+            list[AnyMessage],
         ]

     @abstractmethod
     def generate_prompt(
         self,
-        prompts: List[PromptValue],
-        stop: Optional[List[str]] = None,
+        prompts: list[PromptValue],
+        stop: Optional[list[str]] = None,
         callbacks: Callbacks = None,
         **kwargs: Any,
     ) -> LLMResult:
@@ -202,8 +197,8 @@ class BaseLanguageModel(
     @abstractmethod
     async def agenerate_prompt(
         self,
-        prompts: List[PromptValue],
-        stop: Optional[List[str]] = None,
+        prompts: list[PromptValue],
+        stop: Optional[list[str]] = None,
         callbacks: Callbacks = None,
         **kwargs: Any,
     ) -> LLMResult:
@@ -235,8 +230,8 @@ class BaseLanguageModel(
     """

     def with_structured_output(
-        self, schema: Union[Dict, Type[BaseModel]], **kwargs: Any
-    ) -> Runnable[LanguageModelInput, Union[Dict, BaseModel]]:
+        self, schema: Union[dict, type[BaseModel]], **kwargs: Any
+    ) -> Runnable[LanguageModelInput, Union[dict, BaseModel]]:
         """Not implemented on this class."""
         # Implement this on child class if there is a way of steering the model to
         # generate responses that match a given schema.
@@ -267,7 +262,7 @@ class BaseLanguageModel(
     @abstractmethod
     def predict_messages(
         self,
-        messages: List[BaseMessage],
+        messages: list[BaseMessage],
         *,
         stop: Optional[Sequence[str]] = None,
         **kwargs: Any,
@@ -313,7 +308,7 @@ class BaseLanguageModel(
     @abstractmethod
     async def apredict_messages(
         self,
-        messages: List[BaseMessage],
+        messages: list[BaseMessage],
         *,
         stop: Optional[Sequence[str]] = None,
         **kwargs: Any,
@@ -339,7 +334,7 @@ class BaseLanguageModel(
         """Get the identifying parameters."""
         return self.lc_attributes

-    def get_token_ids(self, text: str) -> List[int]:
+    def get_token_ids(self, text: str) -> list[int]:
         """Return the ordered ids of the tokens in a text.

         Args:
@@ -367,7 +362,7 @@ class BaseLanguageModel(
         """
         return len(self.get_token_ids(text))

-    def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
+    def get_num_tokens_from_messages(self, messages: list[BaseMessage]) -> int:
         """Get the number of tokens in the messages.

         Useful for checking if an input fits in a model's context window.
@@ -381,7 +376,7 @@ class BaseLanguageModel(
         return sum([self.get_num_tokens(get_buffer_string([m])) for m in messages])

     @classmethod
-    def _all_required_field_names(cls) -> Set:
+    def _all_required_field_names(cls) -> set:
         """DEPRECATED: Kept for backwards compatibility.

         Use get_pydantic_field_names.
@@ -3,23 +3,19 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
import inspect
|
||||
import json
|
||||
import typing
|
||||
import uuid
|
||||
import warnings
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import AsyncIterator, Iterator, Sequence
|
||||
from functools import cached_property
|
||||
from operator import itemgetter
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
AsyncIterator,
|
||||
Callable,
|
||||
Dict,
|
||||
Iterator,
|
||||
List,
|
||||
Literal,
|
||||
Optional,
|
||||
Sequence,
|
||||
Type,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
@@ -223,7 +219,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def raise_deprecation(cls, values: Dict) -> Any:
|
||||
def raise_deprecation(cls, values: dict) -> Any:
|
||||
"""Raise deprecation warning if callback_manager is used.
|
||||
|
||||
Args:
|
||||
@@ -277,7 +273,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
input: LanguageModelInput,
|
||||
config: Optional[RunnableConfig] = None,
|
||||
*,
|
||||
stop: Optional[List[str]] = None,
|
||||
stop: Optional[list[str]] = None,
|
||||
**kwargs: Any,
|
||||
) -> BaseMessage:
|
||||
config = ensure_config(config)
|
||||
@@ -300,7 +296,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
input: LanguageModelInput,
|
||||
config: Optional[RunnableConfig] = None,
|
||||
*,
|
||||
stop: Optional[List[str]] = None,
|
||||
stop: Optional[list[str]] = None,
|
||||
**kwargs: Any,
|
||||
) -> BaseMessage:
|
||||
config = ensure_config(config)
|
||||
@@ -356,7 +352,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
input: LanguageModelInput,
|
||||
config: Optional[RunnableConfig] = None,
|
||||
*,
|
||||
stop: Optional[List[str]] = None,
|
||||
stop: Optional[list[str]] = None,
|
||||
**kwargs: Any,
|
||||
) -> Iterator[BaseMessageChunk]:
|
||||
if not self._should_stream(async_api=False, **{**kwargs, **{"stream": True}}):
|
||||
@@ -426,7 +422,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
input: LanguageModelInput,
|
||||
config: Optional[RunnableConfig] = None,
|
||||
*,
|
||||
stop: Optional[List[str]] = None,
|
||||
stop: Optional[list[str]] = None,
|
||||
**kwargs: Any,
|
||||
) -> AsyncIterator[BaseMessageChunk]:
|
||||
if not self._should_stream(async_api=True, **{**kwargs, **{"stream": True}}):
|
||||
@@ -499,12 +495,12 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
# --- Custom methods ---
|
||||
|
||||
def _combine_llm_outputs(self, llm_outputs: List[Optional[dict]]) -> dict:
|
||||
def _combine_llm_outputs(self, llm_outputs: list[Optional[dict]]) -> dict:
|
||||
return {}
|
||||
|
||||
def _get_invocation_params(
|
||||
self,
|
||||
stop: Optional[List[str]] = None,
|
||||
stop: Optional[list[str]] = None,
|
||||
**kwargs: Any,
|
||||
) -> dict:
|
||||
params = self.dict()
|
||||
@@ -513,7 +509,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
def _get_ls_params(
|
||||
self,
|
||||
stop: Optional[List[str]] = None,
|
||||
stop: Optional[list[str]] = None,
|
||||
**kwargs: Any,
|
||||
) -> LangSmithParams:
|
||||
"""Get standard params for tracing."""
|
||||
@@ -550,7 +546,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
return ls_params
|
||||
|
||||
def _get_llm_string(self, stop: Optional[List[str]] = None, **kwargs: Any) -> str:
|
||||
def _get_llm_string(self, stop: Optional[list[str]] = None, **kwargs: Any) -> str:
|
||||
if self.is_lc_serializable():
|
||||
params = {**kwargs, **{"stop": stop}}
|
||||
param_string = str(sorted([(k, v) for k, v in params.items()]))
|
||||
@@ -567,12 +563,12 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
def generate(
|
||||
self,
|
||||
messages: List[List[BaseMessage]],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[list[BaseMessage]],
|
||||
stop: Optional[list[str]] = None,
|
||||
callbacks: Callbacks = None,
|
||||
*,
|
||||
tags: Optional[List[str]] = None,
|
||||
metadata: Optional[Dict[str, Any]] = None,
|
||||
tags: Optional[list[str]] = None,
|
||||
metadata: Optional[dict[str, Any]] = None,
|
||||
run_name: Optional[str] = None,
|
||||
run_id: Optional[uuid.UUID] = None,
|
||||
**kwargs: Any,
|
||||
@@ -658,12 +654,12 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
async def agenerate(
|
||||
self,
|
||||
messages: List[List[BaseMessage]],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[list[BaseMessage]],
|
||||
stop: Optional[list[str]] = None,
|
||||
callbacks: Callbacks = None,
|
||||
*,
|
||||
tags: Optional[List[str]] = None,
|
||||
metadata: Optional[Dict[str, Any]] = None,
|
||||
tags: Optional[list[str]] = None,
|
||||
metadata: Optional[dict[str, Any]] = None,
|
||||
run_name: Optional[str] = None,
|
||||
run_id: Optional[uuid.UUID] = None,
|
||||
**kwargs: Any,
|
||||
@@ -777,8 +773,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
def generate_prompt(
|
||||
self,
|
||||
prompts: List[PromptValue],
|
||||
stop: Optional[List[str]] = None,
|
||||
prompts: list[PromptValue],
|
||||
stop: Optional[list[str]] = None,
|
||||
callbacks: Callbacks = None,
|
||||
**kwargs: Any,
|
||||
) -> LLMResult:
|
||||
@@ -787,8 +783,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
async def agenerate_prompt(
|
||||
self,
|
||||
prompts: List[PromptValue],
|
||||
stop: Optional[List[str]] = None,
|
||||
prompts: list[PromptValue],
|
||||
stop: Optional[list[str]] = None,
|
||||
callbacks: Callbacks = None,
|
||||
**kwargs: Any,
|
||||
) -> LLMResult:
|
||||
@@ -799,8 +795,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
def _generate_with_cache(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
run_manager: Optional[CallbackManagerForLLMRun] = None,
|
||||
**kwargs: Any,
|
||||
) -> ChatResult:
|
||||
@@ -839,7 +835,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
run_manager=run_manager,
|
||||
**kwargs,
|
||||
):
|
||||
chunks: List[ChatGenerationChunk] = []
|
||||
chunks: list[ChatGenerationChunk] = []
|
||||
for chunk in self._stream(messages, stop=stop, **kwargs):
|
||||
chunk.message.response_metadata = _gen_info_and_msg_metadata(chunk)
|
||||
if run_manager:
|
||||
@@ -876,8 +872,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
async def _agenerate_with_cache(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
|
||||
**kwargs: Any,
|
||||
) -> ChatResult:
|
||||
@@ -916,7 +912,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
run_manager=run_manager,
|
||||
**kwargs,
|
||||
):
|
||||
chunks: List[ChatGenerationChunk] = []
|
||||
chunks: list[ChatGenerationChunk] = []
|
||||
async for chunk in self._astream(messages, stop=stop, **kwargs):
|
||||
chunk.message.response_metadata = _gen_info_and_msg_metadata(chunk)
|
||||
if run_manager:
|
||||
@@ -954,8 +950,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
@abstractmethod
|
||||
def _generate(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
run_manager: Optional[CallbackManagerForLLMRun] = None,
|
||||
**kwargs: Any,
|
||||
) -> ChatResult:
|
||||
@@ -963,8 +959,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
async def _agenerate(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
|
||||
**kwargs: Any,
|
||||
) -> ChatResult:
|
||||
@@ -980,8 +976,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
def _stream(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
run_manager: Optional[CallbackManagerForLLMRun] = None,
|
||||
**kwargs: Any,
|
||||
) -> Iterator[ChatGenerationChunk]:
|
||||
@@ -989,8 +985,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
async def _astream(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
|
||||
**kwargs: Any,
|
||||
) -> AsyncIterator[ChatGenerationChunk]:
|
||||
@@ -1017,8 +1013,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
@deprecated("0.1.7", alternative="invoke", removal="1.0")
|
||||
def __call__(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
callbacks: Callbacks = None,
|
||||
**kwargs: Any,
|
||||
) -> BaseMessage:
|
||||
@@ -1032,8 +1028,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
async def _call_async(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
callbacks: Callbacks = None,
|
||||
**kwargs: Any,
|
||||
) -> BaseMessage:
|
||||
@@ -1048,7 +1044,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
@deprecated("0.1.7", alternative="invoke", removal="1.0")
|
||||
def call_as_llm(
|
||||
self, message: str, stop: Optional[List[str]] = None, **kwargs: Any
|
||||
self, message: str, stop: Optional[list[str]] = None, **kwargs: Any
|
||||
) -> str:
|
||||
return self.predict(message, stop=stop, **kwargs)
|
||||
|
||||
@@ -1069,7 +1065,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
@deprecated("0.1.7", alternative="invoke", removal="1.0")
|
||||
def predict_messages(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
messages: list[BaseMessage],
|
||||
*,
|
||||
stop: Optional[Sequence[str]] = None,
|
||||
**kwargs: Any,
|
||||
@@ -1099,7 +1095,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
@deprecated("0.1.7", alternative="ainvoke", removal="1.0")
|
||||
async def apredict_messages(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
messages: list[BaseMessage],
|
||||
*,
|
||||
stop: Optional[Sequence[str]] = None,
|
||||
**kwargs: Any,
|
||||
@@ -1115,7 +1111,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
def _llm_type(self) -> str:
|
||||
"""Return type of chat model."""
|
||||
|
||||
def dict(self, **kwargs: Any) -> Dict:
|
||||
def dict(self, **kwargs: Any) -> dict:
|
||||
"""Return a dictionary of the LLM."""
|
||||
starter_dict = dict(self._identifying_params)
|
||||
starter_dict["_type"] = self._llm_type
|
||||
@@ -1123,18 +1119,18 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
|
||||
|
||||
def bind_tools(
|
||||
self,
|
||||
tools: Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]],
|
||||
tools: Sequence[Union[typing.Dict[str, Any], type, Callable, BaseTool]], # noqa: UP006
|
||||
**kwargs: Any,
|
||||
) -> Runnable[LanguageModelInput, BaseMessage]:
|
||||
raise NotImplementedError()
|
||||
|
||||
def with_structured_output(
|
||||
self,
|
||||
schema: Union[Dict, Type],
|
||||
schema: Union[typing.Dict, type], # noqa: UP006
|
||||
*,
|
||||
include_raw: bool = False,
|
||||
**kwargs: Any,
|
||||
) -> Runnable[LanguageModelInput, Union[Dict, BaseModel]]:
|
||||
) -> Runnable[LanguageModelInput, Union[typing.Dict, BaseModel]]: # noqa: UP006
|
||||
"""Model wrapper that returns outputs formatted to match the given schema.
|
||||
|
||||
Args:
|
||||
@@ -1281,8 +1277,8 @@ class SimpleChatModel(BaseChatModel):
|
||||
|
||||
def _generate(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
run_manager: Optional[CallbackManagerForLLMRun] = None,
|
||||
**kwargs: Any,
|
||||
) -> ChatResult:
|
||||
@@ -1294,8 +1290,8 @@ class SimpleChatModel(BaseChatModel):
|
||||
@abstractmethod
|
||||
def _call(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
run_manager: Optional[CallbackManagerForLLMRun] = None,
|
||||
**kwargs: Any,
|
||||
) -> str:
|
||||
@@ -1303,8 +1299,8 @@ class SimpleChatModel(BaseChatModel):
|
||||
|
||||
async def _agenerate(
|
||||
self,
|
||||
messages: List[BaseMessage],
|
||||
stop: Optional[List[str]] = None,
|
||||
messages: list[BaseMessage],
|
||||
stop: Optional[list[str]] = None,
|
||||
run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
|
||||
**kwargs: Any,
|
||||
) -> ChatResult:
|
||||
|
||||
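Every hunk above is the same mechanical rewrite: PEP 585 (Python 3.9) made the builtin containers subscriptable, so `typing.List`, `typing.Dict`, `typing.Set` and `typing.Type` become `list`, `dict`, `set` and `type` with identical runtime and type-checker behaviour, while the handful of annotations kept as `typing.Dict` carry `# noqa: UP006` to exempt them from ruff's pyupgrade rule. A minimal sketch of the before/after pattern (the function names below are illustrative, not from the diff):

from typing import Any, Optional

# Before (pre-PEP 585 style):
#     from typing import Dict, List
#     def get_token_ids(text: str) -> List[int]: ...
#     def params(stop: Optional[List[str]] = None) -> Dict[str, Any]: ...

# After: builtin generics; Optional/Union/Any still come from typing.
def get_token_ids(text: str) -> list[int]:
    """Toy tokenizer: map each whitespace-separated token to an id."""
    return [hash(token) % 50_000 for token in text.split()]


def params(stop: Optional[list[str]] = None) -> dict[str, Any]:
    """Gather keyword parameters the way the diffed signatures do."""
    return {"stop": stop or []}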
@@ -1,6 +1,7 @@
 import asyncio
 import time
-from typing import Any, AsyncIterator, Iterator, List, Mapping, Optional
+from collections.abc import AsyncIterator, Iterator, Mapping
+from typing import Any, Optional

 from langchain_core.callbacks import (
 AsyncCallbackManagerForLLMRun,
@@ -14,7 +15,7 @@ from langchain_core.runnables import RunnableConfig
 class FakeListLLM(LLM):
 """Fake LLM for testing purposes."""

-responses: List[str]
+responses: list[str]
 """List of responses to return in order."""
 # This parameter should be removed from FakeListLLM since
 # it's only used by sub-classes.
@@ -37,7 +38,7 @@ class FakeListLLM(LLM):
 def _call(
 self,
 prompt: str,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> str:
@@ -52,7 +53,7 @@ class FakeListLLM(LLM):
 async def _acall(
 self,
 prompt: str,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> str:
@@ -90,7 +91,7 @@ class FakeStreamingListLLM(FakeListLLM):
 input: LanguageModelInput,
 config: Optional[RunnableConfig] = None,
 *,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 **kwargs: Any,
 ) -> Iterator[str]:
 result = self.invoke(input, config)
@@ -110,7 +111,7 @@ class FakeStreamingListLLM(FakeListLLM):
 input: LanguageModelInput,
 config: Optional[RunnableConfig] = None,
 *,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 **kwargs: Any,
 ) -> AsyncIterator[str]:
 result = await self.ainvoke(input, config)

@@ -3,7 +3,8 @@
 import asyncio
 import re
 import time
-from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Union, cast
+from collections.abc import AsyncIterator, Iterator
+from typing import Any, Optional, Union, cast

 from langchain_core.callbacks import (
 AsyncCallbackManagerForLLMRun,
@@ -17,7 +18,7 @@ from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResu
 class FakeMessagesListChatModel(BaseChatModel):
 """Fake ChatModel for testing purposes."""

-responses: List[BaseMessage]
+responses: list[BaseMessage]
 """List of responses to **cycle** through in order."""
 sleep: Optional[float] = None
 """Sleep time in seconds between responses."""
@@ -26,8 +27,8 @@ class FakeMessagesListChatModel(BaseChatModel):

 def _generate(
 self,
-messages: List[BaseMessage],
-stop: Optional[List[str]] = None,
+messages: list[BaseMessage],
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> ChatResult:
@@ -51,7 +52,7 @@ class FakeListChatModelError(Exception):
 class FakeListChatModel(SimpleChatModel):
 """Fake ChatModel for testing purposes."""

-responses: List[str]
+responses: list[str]
 """List of responses to **cycle** through in order."""
 sleep: Optional[float] = None
 i: int = 0
@@ -65,8 +66,8 @@ class FakeListChatModel(SimpleChatModel):

 def _call(
 self,
-messages: List[BaseMessage],
-stop: Optional[List[str]] = None,
+messages: list[BaseMessage],
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> str:
@@ -80,8 +81,8 @@ class FakeListChatModel(SimpleChatModel):

 def _stream(
 self,
-messages: List[BaseMessage],
-stop: Union[List[str], None] = None,
+messages: list[BaseMessage],
+stop: Union[list[str], None] = None,
 run_manager: Union[CallbackManagerForLLMRun, None] = None,
 **kwargs: Any,
 ) -> Iterator[ChatGenerationChunk]:
@@ -103,8 +104,8 @@ class FakeListChatModel(SimpleChatModel):

 async def _astream(
 self,
-messages: List[BaseMessage],
-stop: Union[List[str], None] = None,
+messages: list[BaseMessage],
+stop: Union[list[str], None] = None,
 run_manager: Union[AsyncCallbackManagerForLLMRun, None] = None,
 **kwargs: Any,
 ) -> AsyncIterator[ChatGenerationChunk]:
@@ -124,7 +125,7 @@ class FakeListChatModel(SimpleChatModel):
 yield ChatGenerationChunk(message=AIMessageChunk(content=c))

 @property
-def _identifying_params(self) -> Dict[str, Any]:
+def _identifying_params(self) -> dict[str, Any]:
 return {"responses": self.responses}


@@ -133,8 +134,8 @@ class FakeChatModel(SimpleChatModel):

 def _call(
 self,
-messages: List[BaseMessage],
-stop: Optional[List[str]] = None,
+messages: list[BaseMessage],
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> str:
@@ -142,8 +143,8 @@ class FakeChatModel(SimpleChatModel):

 async def _agenerate(
 self,
-messages: List[BaseMessage],
-stop: Optional[List[str]] = None,
+messages: list[BaseMessage],
+stop: Optional[list[str]] = None,
 run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> ChatResult:
@@ -157,7 +158,7 @@ class FakeChatModel(SimpleChatModel):
 return "fake-chat-model"

 @property
-def _identifying_params(self) -> Dict[str, Any]:
+def _identifying_params(self) -> dict[str, Any]:
 return {"key": "fake"}


@@ -186,8 +187,8 @@ class GenericFakeChatModel(BaseChatModel):

 def _generate(
 self,
-messages: List[BaseMessage],
-stop: Optional[List[str]] = None,
+messages: list[BaseMessage],
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> ChatResult:
@@ -202,8 +203,8 @@ class GenericFakeChatModel(BaseChatModel):

 def _stream(
 self,
-messages: List[BaseMessage],
-stop: Optional[List[str]] = None,
+messages: list[BaseMessage],
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> Iterator[ChatGenerationChunk]:
@@ -231,7 +232,7 @@ class GenericFakeChatModel(BaseChatModel):
 # Use a regular expression to split on whitespace with a capture group
 # so that we can preserve the whitespace in the output.
 assert isinstance(content, str)
-content_chunks = cast(List[str], re.split(r"(\s)", content))
+content_chunks = cast(list[str], re.split(r"(\s)", content))

 for token in content_chunks:
 chunk = ChatGenerationChunk(
@@ -249,7 +250,7 @@ class GenericFakeChatModel(BaseChatModel):
 for fkey, fvalue in value.items():
 if isinstance(fvalue, str):
 # Break function call by `,`
-fvalue_chunks = cast(List[str], re.split(r"(,)", fvalue))
+fvalue_chunks = cast(list[str], re.split(r"(,)", fvalue))
 for fvalue_chunk in fvalue_chunks:
 chunk = ChatGenerationChunk(
 message=AIMessageChunk(
@@ -306,8 +307,8 @@ class ParrotFakeChatModel(BaseChatModel):

 def _generate(
 self,
-messages: List[BaseMessage],
-stop: Optional[List[str]] = None,
+messages: list[BaseMessage],
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> ChatResult:

@@ -10,18 +10,12 @@ import logging
 import uuid
 import warnings
 from abc import ABC, abstractmethod
+from collections.abc import AsyncIterator, Iterator, Sequence
 from pathlib import Path
 from typing import (
 Any,
-AsyncIterator,
 Callable,
-Dict,
-Iterator,
-List,
 Optional,
-Sequence,
-Tuple,
-Type,
 Union,
 cast,
 )
@@ -76,7 +70,7 @@ def _log_error_once(msg: str) -> None:


 def create_base_retry_decorator(
-error_types: List[Type[BaseException]],
+error_types: list[type[BaseException]],
 max_retries: int = 1,
 run_manager: Optional[
 Union[AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun]
@@ -153,10 +147,10 @@ def _resolve_cache(cache: Union[BaseCache, bool, None]) -> Optional[BaseCache]:


 def get_prompts(
-params: Dict[str, Any],
-prompts: List[str],
+params: dict[str, Any],
+prompts: list[str],
 cache: Optional[Union[BaseCache, bool, None]] = None,
-) -> Tuple[Dict[int, List], str, List[int], List[str]]:
+) -> tuple[dict[int, list], str, list[int], list[str]]:
 """Get prompts that are already cached.

 Args:
@@ -189,10 +183,10 @@ def get_prompts(


 async def aget_prompts(
-params: Dict[str, Any],
-prompts: List[str],
+params: dict[str, Any],
+prompts: list[str],
 cache: Optional[Union[BaseCache, bool, None]] = None,
-) -> Tuple[Dict[int, List], str, List[int], List[str]]:
+) -> tuple[dict[int, list], str, list[int], list[str]]:
 """Get prompts that are already cached. Async version.

 Args:
@@ -225,11 +219,11 @@ async def aget_prompts(

 def update_cache(
 cache: Union[BaseCache, bool, None],
-existing_prompts: Dict[int, List],
+existing_prompts: dict[int, list],
 llm_string: str,
-missing_prompt_idxs: List[int],
+missing_prompt_idxs: list[int],
 new_results: LLMResult,
-prompts: List[str],
+prompts: list[str],
 ) -> Optional[dict]:
 """Update the cache and get the LLM output.

@@ -259,11 +253,11 @@ def update_cache(

 async def aupdate_cache(
 cache: Union[BaseCache, bool, None],
-existing_prompts: Dict[int, List],
+existing_prompts: dict[int, list],
 llm_string: str,
-missing_prompt_idxs: List[int],
+missing_prompt_idxs: list[int],
 new_results: LLMResult,
-prompts: List[str],
+prompts: list[str],
 ) -> Optional[dict]:
 """Update the cache and get the LLM output. Async version.

@@ -306,7 +300,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 @model_validator(mode="before")
 @classmethod
-def raise_deprecation(cls, values: Dict) -> Any:
+def raise_deprecation(cls, values: dict) -> Any:
 """Raise deprecation warning if callback_manager is used."""
 if values.get("callback_manager") is not None:
 warnings.warn(
@@ -324,7 +318,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 # --- Runnable methods ---

 @property
-def OutputType(self) -> Type[str]:
+def OutputType(self) -> type[str]:
 """Get the input type for this runnable."""
 return str

@@ -343,7 +337,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 def _get_ls_params(
 self,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 **kwargs: Any,
 ) -> LangSmithParams:
 """Get standard params for tracing."""
@@ -383,7 +377,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 input: LanguageModelInput,
 config: Optional[RunnableConfig] = None,
 *,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 **kwargs: Any,
 ) -> str:
 config = ensure_config(config)
@@ -407,7 +401,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 input: LanguageModelInput,
 config: Optional[RunnableConfig] = None,
 *,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 **kwargs: Any,
 ) -> str:
 config = ensure_config(config)
@@ -425,12 +419,12 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 def batch(
 self,
-inputs: List[LanguageModelInput],
-config: Optional[Union[RunnableConfig, List[RunnableConfig]]] = None,
+inputs: list[LanguageModelInput],
+config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
 *,
 return_exceptions: bool = False,
 **kwargs: Any,
-) -> List[str]:
+) -> list[str]:
 if not inputs:
 return []

@@ -450,7 +444,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 return [g[0].text for g in llm_result.generations]
 except Exception as e:
 if return_exceptions:
-return cast(List[str], [e for _ in inputs])
+return cast(list[str], [e for _ in inputs])
 else:
 raise e
 else:
@@ -472,12 +466,12 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 async def abatch(
 self,
-inputs: List[LanguageModelInput],
-config: Optional[Union[RunnableConfig, List[RunnableConfig]]] = None,
+inputs: list[LanguageModelInput],
+config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
 *,
 return_exceptions: bool = False,
 **kwargs: Any,
-) -> List[str]:
+) -> list[str]:
 if not inputs:
 return []
 config = get_config_list(config, len(inputs))
@@ -496,7 +490,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 return [g[0].text for g in llm_result.generations]
 except Exception as e:
 if return_exceptions:
-return cast(List[str], [e for _ in inputs])
+return cast(list[str], [e for _ in inputs])
 else:
 raise e
 else:
@@ -521,7 +515,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 input: LanguageModelInput,
 config: Optional[RunnableConfig] = None,
 *,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 **kwargs: Any,
 ) -> Iterator[str]:
 if type(self)._stream == BaseLLM._stream:
@@ -583,7 +577,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 input: LanguageModelInput,
 config: Optional[RunnableConfig] = None,
 *,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 **kwargs: Any,
 ) -> AsyncIterator[str]:
 if (
@@ -649,8 +643,8 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 @abstractmethod
 def _generate(
 self,
-prompts: List[str],
-stop: Optional[List[str]] = None,
+prompts: list[str],
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> LLMResult:
@@ -658,8 +652,8 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 async def _agenerate(
 self,
-prompts: List[str],
-stop: Optional[List[str]] = None,
+prompts: list[str],
+stop: Optional[list[str]] = None,
 run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> LLMResult:
@@ -676,7 +670,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 def _stream(
 self,
 prompt: str,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> Iterator[GenerationChunk]:
@@ -704,7 +698,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 async def _astream(
 self,
 prompt: str,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> AsyncIterator[GenerationChunk]:
@@ -747,9 +741,9 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 def generate_prompt(
 self,
-prompts: List[PromptValue],
-stop: Optional[List[str]] = None,
-callbacks: Optional[Union[Callbacks, List[Callbacks]]] = None,
+prompts: list[PromptValue],
+stop: Optional[list[str]] = None,
+callbacks: Optional[Union[Callbacks, list[Callbacks]]] = None,
 **kwargs: Any,
 ) -> LLMResult:
 prompt_strings = [p.to_string() for p in prompts]
@@ -757,9 +751,9 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 async def agenerate_prompt(
 self,
-prompts: List[PromptValue],
-stop: Optional[List[str]] = None,
-callbacks: Optional[Union[Callbacks, List[Callbacks]]] = None,
+prompts: list[PromptValue],
+stop: Optional[list[str]] = None,
+callbacks: Optional[Union[Callbacks, list[Callbacks]]] = None,
 **kwargs: Any,
 ) -> LLMResult:
 prompt_strings = [p.to_string() for p in prompts]
@@ -769,9 +763,9 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 def _generate_helper(
 self,
-prompts: List[str],
-stop: Optional[List[str]],
-run_managers: List[CallbackManagerForLLMRun],
+prompts: list[str],
+stop: Optional[list[str]],
+run_managers: list[CallbackManagerForLLMRun],
 new_arg_supported: bool,
 **kwargs: Any,
 ) -> LLMResult:
@@ -802,14 +796,14 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 def generate(
 self,
-prompts: List[str],
-stop: Optional[List[str]] = None,
-callbacks: Optional[Union[Callbacks, List[Callbacks]]] = None,
+prompts: list[str],
+stop: Optional[list[str]] = None,
+callbacks: Optional[Union[Callbacks, list[Callbacks]]] = None,
 *,
-tags: Optional[Union[List[str], List[List[str]]]] = None,
-metadata: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None,
-run_name: Optional[Union[str, List[str]]] = None,
-run_id: Optional[Union[uuid.UUID, List[Optional[uuid.UUID]]]] = None,
+tags: Optional[Union[list[str], list[list[str]]]] = None,
+metadata: Optional[Union[dict[str, Any], list[dict[str, Any]]]] = None,
+run_name: Optional[Union[str, list[str]]] = None,
+run_id: Optional[Union[uuid.UUID, list[Optional[uuid.UUID]]]] = None,
 **kwargs: Any,
 ) -> LLMResult:
 """Pass a sequence of prompts to a model and return generations.
@@ -885,13 +879,13 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 assert run_name is None or (
 isinstance(run_name, list) and len(run_name) == len(prompts)
 )
-callbacks = cast(List[Callbacks], callbacks)
-tags_list = cast(List[Optional[List[str]]], tags or ([None] * len(prompts)))
+callbacks = cast(list[Callbacks], callbacks)
+tags_list = cast(list[Optional[list[str]]], tags or ([None] * len(prompts)))
 metadata_list = cast(
-List[Optional[Dict[str, Any]]], metadata or ([{}] * len(prompts))
+list[Optional[dict[str, Any]]], metadata or ([{}] * len(prompts))
 )
 run_name_list = run_name or cast(
-List[Optional[str]], ([None] * len(prompts))
+list[Optional[str]], ([None] * len(prompts))
 )
 callback_managers = [
 CallbackManager.configure(
@@ -912,9 +906,9 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 cast(Callbacks, callbacks),
 self.callbacks,
 self.verbose,
-cast(List[str], tags),
+cast(list[str], tags),
 self.tags,
-cast(Dict[str, Any], metadata),
+cast(dict[str, Any], metadata),
 self.metadata,
 )
 ] * len(prompts)
@@ -987,7 +981,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 @staticmethod
 def _get_run_ids_list(
-run_id: Optional[Union[uuid.UUID, List[Optional[uuid.UUID]]]], prompts: list
+run_id: Optional[Union[uuid.UUID, list[Optional[uuid.UUID]]]], prompts: list
 ) -> list:
 if run_id is None:
 return [None] * len(prompts)
@@ -1002,9 +996,9 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 async def _agenerate_helper(
 self,
-prompts: List[str],
-stop: Optional[List[str]],
-run_managers: List[AsyncCallbackManagerForLLMRun],
+prompts: list[str],
+stop: Optional[list[str]],
+run_managers: list[AsyncCallbackManagerForLLMRun],
 new_arg_supported: bool,
 **kwargs: Any,
 ) -> LLMResult:
@@ -1044,14 +1038,14 @@ class BaseLLM(BaseLanguageModel[str], ABC):

 async def agenerate(
 self,
-prompts: List[str],
-stop: Optional[List[str]] = None,
-callbacks: Optional[Union[Callbacks, List[Callbacks]]] = None,
+prompts: list[str],
+stop: Optional[list[str]] = None,
+callbacks: Optional[Union[Callbacks, list[Callbacks]]] = None,
 *,
-tags: Optional[Union[List[str], List[List[str]]]] = None,
-metadata: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None,
-run_name: Optional[Union[str, List[str]]] = None,
-run_id: Optional[Union[uuid.UUID, List[Optional[uuid.UUID]]]] = None,
+tags: Optional[Union[list[str], list[list[str]]]] = None,
+metadata: Optional[Union[dict[str, Any], list[dict[str, Any]]]] = None,
+run_name: Optional[Union[str, list[str]]] = None,
+run_id: Optional[Union[uuid.UUID, list[Optional[uuid.UUID]]]] = None,
 **kwargs: Any,
 ) -> LLMResult:
 """Asynchronously pass a sequence of prompts to a model and return generations.
@@ -1118,13 +1112,13 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 assert run_name is None or (
 isinstance(run_name, list) and len(run_name) == len(prompts)
 )
-callbacks = cast(List[Callbacks], callbacks)
-tags_list = cast(List[Optional[List[str]]], tags or ([None] * len(prompts)))
+callbacks = cast(list[Callbacks], callbacks)
+tags_list = cast(list[Optional[list[str]]], tags or ([None] * len(prompts)))
 metadata_list = cast(
-List[Optional[Dict[str, Any]]], metadata or ([{}] * len(prompts))
+list[Optional[dict[str, Any]]], metadata or ([{}] * len(prompts))
 )
 run_name_list = run_name or cast(
-List[Optional[str]], ([None] * len(prompts))
+list[Optional[str]], ([None] * len(prompts))
 )
 callback_managers = [
 AsyncCallbackManager.configure(
@@ -1145,9 +1139,9 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 cast(Callbacks, callbacks),
 self.callbacks,
 self.verbose,
-cast(List[str], tags),
+cast(list[str], tags),
 self.tags,
-cast(Dict[str, Any], metadata),
+cast(dict[str, Any], metadata),
 self.metadata,
 )
 ] * len(prompts)
@@ -1239,11 +1233,11 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 def __call__(
 self,
 prompt: str,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 callbacks: Callbacks = None,
 *,
-tags: Optional[List[str]] = None,
-metadata: Optional[Dict[str, Any]] = None,
+tags: Optional[list[str]] = None,
+metadata: Optional[dict[str, Any]] = None,
 **kwargs: Any,
 ) -> str:
 """Check Cache and run the LLM on the given prompt and input.
@@ -1287,11 +1281,11 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 async def _call_async(
 self,
 prompt: str,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 callbacks: Callbacks = None,
 *,
-tags: Optional[List[str]] = None,
-metadata: Optional[Dict[str, Any]] = None,
+tags: Optional[list[str]] = None,
+metadata: Optional[dict[str, Any]] = None,
 **kwargs: Any,
 ) -> str:
 """Check Cache and run the LLM on the given prompt and input."""
@@ -1318,7 +1312,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 @deprecated("0.1.7", alternative="invoke", removal="1.0")
 def predict_messages(
 self,
-messages: List[BaseMessage],
+messages: list[BaseMessage],
 *,
 stop: Optional[Sequence[str]] = None,
 **kwargs: Any,
@@ -1344,7 +1338,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 @deprecated("0.1.7", alternative="ainvoke", removal="1.0")
 async def apredict_messages(
 self,
-messages: List[BaseMessage],
+messages: list[BaseMessage],
 *,
 stop: Optional[Sequence[str]] = None,
 **kwargs: Any,
@@ -1367,7 +1361,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
 def _llm_type(self) -> str:
 """Return type of llm."""

-def dict(self, **kwargs: Any) -> Dict:
+def dict(self, **kwargs: Any) -> dict:
 """Return a dictionary of the LLM."""
 starter_dict = dict(self._identifying_params)
 starter_dict["_type"] = self._llm_type
@@ -1443,7 +1437,7 @@ class LLM(BaseLLM):
 def _call(
 self,
 prompt: str,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> str:
@@ -1467,7 +1461,7 @@ class LLM(BaseLLM):
 async def _acall(
 self,
 prompt: str,
-stop: Optional[List[str]] = None,
+stop: Optional[list[str]] = None,
 run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> str:
@@ -1500,8 +1494,8 @@ class LLM(BaseLLM):

 def _generate(
 self,
-prompts: List[str],
-stop: Optional[List[str]] = None,
+prompts: list[str],
+stop: Optional[list[str]] = None,
 run_manager: Optional[CallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> LLMResult:
@@ -1520,8 +1514,8 @@ class LLM(BaseLLM):

 async def _agenerate(
 self,
-prompts: List[str],
-stop: Optional[List[str]] = None,
+prompts: list[str],
+stop: Optional[list[str]] = None,
 run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
 **kwargs: Any,
 ) -> LLMResult:

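The import hunks in this file and the fakes above follow the companion rule: the iterator and container ABCs (`AsyncIterator`, `Iterator`, `Mapping`, `Sequence`) are imported from `collections.abc` instead of the deprecated `typing` aliases. A short sketch of the resulting style, assuming Python 3.9+ (the helpers below are illustrative, not part of the diff):

from collections.abc import AsyncIterator, Iterator, Sequence


def head(items: Sequence[str], n: int = 3) -> list[str]:
    # Sequence here is collections.abc.Sequence, not typing.Sequence.
    return list(items[:n])


def chunks(text: str, size: int) -> Iterator[str]:
    # Generator functions still satisfy the Iterator annotation.
    for i in range(0, len(text), size):
        yield text[i : i + size]


async def astream(parts: Sequence[str]) -> AsyncIterator[str]:
    # Async generators satisfy AsyncIterator, matching the _astream methods.
    for part in parts:
        yield part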
@@ -1,7 +1,7 @@
|
||||
import importlib
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Optional
|
||||
|
||||
from langchain_core._api import beta
|
||||
from langchain_core.load.mapping import (
|
||||
@@ -34,11 +34,11 @@ class Reviver:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
secrets_map: Optional[Dict[str, str]] = None,
|
||||
valid_namespaces: Optional[List[str]] = None,
|
||||
secrets_map: Optional[dict[str, str]] = None,
|
||||
valid_namespaces: Optional[list[str]] = None,
|
||||
secrets_from_env: bool = True,
|
||||
additional_import_mappings: Optional[
|
||||
Dict[Tuple[str, ...], Tuple[str, ...]]
|
||||
dict[tuple[str, ...], tuple[str, ...]]
|
||||
] = None,
|
||||
) -> None:
|
||||
"""Initialize the reviver.
|
||||
@@ -73,7 +73,7 @@ class Reviver:
|
||||
else ALL_SERIALIZABLE_MAPPINGS
|
||||
)
|
||||
|
||||
def __call__(self, value: Dict[str, Any]) -> Any:
|
||||
def __call__(self, value: dict[str, Any]) -> Any:
|
||||
if (
|
||||
value.get("lc", None) == 1
|
||||
and value.get("type", None) == "secret"
|
||||
@@ -154,10 +154,10 @@ class Reviver:
|
||||
def loads(
|
||||
text: str,
|
||||
*,
|
||||
secrets_map: Optional[Dict[str, str]] = None,
|
||||
valid_namespaces: Optional[List[str]] = None,
|
||||
secrets_map: Optional[dict[str, str]] = None,
|
||||
valid_namespaces: Optional[list[str]] = None,
|
||||
secrets_from_env: bool = True,
|
||||
additional_import_mappings: Optional[Dict[Tuple[str, ...], Tuple[str, ...]]] = None,
|
||||
additional_import_mappings: Optional[dict[tuple[str, ...], tuple[str, ...]]] = None,
|
||||
) -> Any:
|
||||
"""Revive a LangChain class from a JSON string.
|
||||
Equivalent to `load(json.loads(text))`.
|
||||
@@ -190,10 +190,10 @@ def loads(
|
||||
def load(
|
||||
obj: Any,
|
||||
*,
|
||||
secrets_map: Optional[Dict[str, str]] = None,
|
||||
valid_namespaces: Optional[List[str]] = None,
|
||||
secrets_map: Optional[dict[str, str]] = None,
|
||||
valid_namespaces: Optional[list[str]] = None,
|
||||
secrets_from_env: bool = True,
|
||||
additional_import_mappings: Optional[Dict[Tuple[str, ...], Tuple[str, ...]]] = None,
|
||||
additional_import_mappings: Optional[dict[tuple[str, ...], tuple[str, ...]]] = None,
|
||||
) -> Any:
|
||||
"""Revive a LangChain class from a JSON object. Use this if you already
|
||||
have a parsed JSON object, eg. from `json.load` or `orjson.loads`.
|
||||
|
||||
@@ -18,11 +18,9 @@ The mapping allows us to deserialize an AIMessage created with an older
|
||||
version of LangChain where the code was in a different location.
|
||||
"""
|
||||
|
||||
from typing import Dict, Tuple
|
||||
|
||||
# First value is the value that it is serialized as
|
||||
# Second value is the path to load it from
|
||||
SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
|
||||
SERIALIZABLE_MAPPING: dict[tuple[str, ...], tuple[str, ...]] = {
|
||||
("langchain", "schema", "messages", "AIMessage"): (
|
||||
"langchain_core",
|
||||
"messages",
|
||||
@@ -535,7 +533,7 @@ SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
|
||||
|
||||
# Needed for backwards compatibility for old versions of LangChain where things
|
||||
# Were in different place
|
||||
_OG_SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
|
||||
_OG_SERIALIZABLE_MAPPING: dict[tuple[str, ...], tuple[str, ...]] = {
|
||||
("langchain", "schema", "AIMessage"): (
|
||||
"langchain_core",
|
||||
"messages",
|
||||
@@ -583,7 +581,7 @@ _OG_SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
|
||||
|
||||
# Needed for backwards compatibility for a few versions where we serialized
|
||||
# with langchain_core paths.
|
||||
OLD_CORE_NAMESPACES_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
|
||||
OLD_CORE_NAMESPACES_MAPPING: dict[tuple[str, ...], tuple[str, ...]] = {
|
||||
("langchain_core", "messages", "ai", "AIMessage"): (
|
||||
"langchain_core",
|
||||
"messages",
|
||||
@@ -937,7 +935,7 @@ OLD_CORE_NAMESPACES_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
|
||||
),
|
||||
}
|
||||
|
||||
_JS_SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
|
||||
_JS_SERIALIZABLE_MAPPING: dict[tuple[str, ...], tuple[str, ...]] = {
|
||||
("langchain_core", "messages", "AIMessage"): (
|
||||
"langchain_core",
|
||||
"messages",
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
from abc import ABC
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
List,
|
||||
Literal,
|
||||
Optional,
|
||||
TypedDict,
|
||||
@@ -25,9 +23,9 @@ class BaseSerialized(TypedDict):
|
||||
"""
|
||||
|
||||
lc: int
|
||||
id: List[str]
|
||||
id: list[str]
|
||||
name: NotRequired[str]
|
||||
graph: NotRequired[Dict[str, Any]]
|
||||
graph: NotRequired[dict[str, Any]]
|
||||
|
||||
|
||||
class SerializedConstructor(BaseSerialized):
|
||||
@@ -39,7 +37,7 @@ class SerializedConstructor(BaseSerialized):
|
||||
"""
|
||||
|
||||
type: Literal["constructor"]
|
||||
kwargs: Dict[str, Any]
|
||||
kwargs: dict[str, Any]
|
||||
|
||||
|
||||
class SerializedSecret(BaseSerialized):
|
||||
@@ -125,7 +123,7 @@ class Serializable(BaseModel, ABC):
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def get_lc_namespace(cls) -> List[str]:
|
||||
def get_lc_namespace(cls) -> list[str]:
|
||||
"""Get the namespace of the langchain object.
|
||||
|
||||
For example, if the class is `langchain.llms.openai.OpenAI`, then the
|
||||
@@ -134,7 +132,7 @@ class Serializable(BaseModel, ABC):
|
||||
return cls.__module__.split(".")
|
||||
|
||||
@property
|
||||
def lc_secrets(self) -> Dict[str, str]:
|
||||
def lc_secrets(self) -> dict[str, str]:
|
||||
"""A map of constructor argument names to secret ids.
|
||||
|
||||
For example,
|
||||
@@ -143,7 +141,7 @@ class Serializable(BaseModel, ABC):
|
||||
return dict()
|
||||
|
||||
@property
|
||||
def lc_attributes(self) -> Dict:
|
||||
def lc_attributes(self) -> dict:
|
||||
"""List of attribute names that should be included in the serialized kwargs.
|
||||
|
||||
These attributes must be accepted by the constructor.
|
||||
@@ -152,7 +150,7 @@ class Serializable(BaseModel, ABC):
|
||||
return {}
|
||||
|
||||
@classmethod
|
||||
def lc_id(cls) -> List[str]:
|
||||
def lc_id(cls) -> list[str]:
|
||||
"""A unique identifier for this class for serialization purposes.
|
||||
|
||||
The unique identifier is a list of strings that describes the path
|
||||
@@ -315,8 +313,8 @@ def _is_field_useful(inst: Serializable, key: str, value: Any) -> bool:
|
||||
|
||||
|
||||
def _replace_secrets(
|
||||
root: Dict[Any, Any], secrets_map: Dict[str, str]
|
||||
) -> Dict[Any, Any]:
|
||||
root: dict[Any, Any], secrets_map: dict[str, str]
|
||||
) -> dict[Any, Any]:
|
||||
result = root.copy()
|
||||
for path, secret_id in secrets_map.items():
|
||||
[*parts, last] = path.split(".")
|
||||
@@ -344,7 +342,7 @@ def to_json_not_implemented(obj: object) -> SerializedNotImplemented:
|
||||
Returns:
|
||||
SerializedNotImplemented
|
||||
"""
|
||||
_id: List[str] = []
|
||||
_id: list[str] = []
|
||||
try:
|
||||
if hasattr(obj, "__name__"):
|
||||
_id = [*obj.__module__.split("."), obj.__name__]
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any, Dict, List
|
||||
from typing import Any
|
||||
|
||||
from pydantic import ConfigDict
|
||||
|
||||
@@ -55,11 +55,11 @@ class BaseMemory(Serializable, ABC):
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def memory_variables(self) -> List[str]:
|
||||
def memory_variables(self) -> list[str]:
|
||||
"""The string keys this memory class will add to chain inputs."""
|
||||
|
||||
@abstractmethod
|
||||
def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
|
||||
def load_memory_variables(self, inputs: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Return key-value pairs given the text input to the chain.
|
||||
|
||||
Args:
|
||||
@@ -69,7 +69,7 @@ class BaseMemory(Serializable, ABC):
|
||||
A dictionary of key-value pairs.
|
||||
"""
|
||||
|
||||
async def aload_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
|
||||
async def aload_memory_variables(self, inputs: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Async return key-value pairs given the text input to the chain.
|
||||
|
||||
Args:
|
||||
@@ -81,7 +81,7 @@ class BaseMemory(Serializable, ABC):
|
||||
return await run_in_executor(None, self.load_memory_variables, inputs)
|
||||
|
||||
@abstractmethod
|
||||
def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None:
|
||||
def save_context(self, inputs: dict[str, Any], outputs: dict[str, str]) -> None:
|
||||
"""Save the context of this chain run to memory.
|
||||
|
||||
Args:
|
||||
@@ -90,7 +90,7 @@ class BaseMemory(Serializable, ABC):
|
||||
"""
|
||||
|
||||
async def asave_context(
|
||||
self, inputs: Dict[str, Any], outputs: Dict[str, str]
|
||||
self, inputs: dict[str, Any], outputs: dict[str, str]
|
||||
) -> None:
|
||||
"""Async save the context of this chain run to memory.
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import json
|
||||
from typing import Any, Dict, List, Literal, Optional, Union
|
||||
from typing import Any, Literal, Optional, Union
|
||||
|
||||
from pydantic import model_validator
|
||||
from typing_extensions import Self, TypedDict
|
||||
@@ -69,9 +69,9 @@ class AIMessage(BaseMessage):
|
||||
At the moment, this is ignored by most models. Usage is discouraged.
|
||||
"""
|
||||
|
||||
tool_calls: List[ToolCall] = []
|
||||
tool_calls: list[ToolCall] = []
|
||||
"""If provided, tool calls associated with the message."""
|
||||
invalid_tool_calls: List[InvalidToolCall] = []
|
||||
invalid_tool_calls: list[InvalidToolCall] = []
|
||||
"""If provided, tool calls with parsing errors associated with the message."""
|
||||
usage_metadata: Optional[UsageMetadata] = None
|
||||
"""If provided, usage metadata for a message, such as token counts.
|
||||
@@ -83,7 +83,7 @@ class AIMessage(BaseMessage):
|
||||
"""The type of the message (used for deserialization). Defaults to "ai"."""
|
||||
|
||||
def __init__(
|
||||
self, content: Union[str, List[Union[str, Dict]]], **kwargs: Any
|
||||
self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
|
||||
) -> None:
|
||||
"""Pass in content as positional arg.
|
||||
|
||||
@@ -94,7 +94,7 @@ class AIMessage(BaseMessage):
|
||||
super().__init__(content=content, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def get_lc_namespace(cls) -> List[str]:
|
||||
def get_lc_namespace(cls) -> list[str]:
|
||||
"""Get the namespace of the langchain object.
|
||||
|
||||
Returns:
|
||||
@@ -104,7 +104,7 @@ class AIMessage(BaseMessage):
|
||||
return ["langchain", "schema", "messages"]
|
||||
|
||||
@property
|
||||
def lc_attributes(self) -> Dict:
|
||||
def lc_attributes(self) -> dict:
|
||||
"""Attrs to be serialized even if they are derived from other init args."""
|
||||
return {
|
||||
"tool_calls": self.tool_calls,
|
||||
@@ -137,7 +137,7 @@ class AIMessage(BaseMessage):
|
||||
|
||||
# Ensure "type" is properly set on all tool call-like dicts.
|
||||
if tool_calls := values.get("tool_calls"):
|
||||
updated: List = []
|
||||
updated: list = []
|
||||
for tc in tool_calls:
|
||||
updated.append(
|
||||
create_tool_call(**{k: v for k, v in tc.items() if k != "type"})
|
||||
@@ -178,7 +178,7 @@ class AIMessage(BaseMessage):
|
||||
base = super().pretty_repr(html=html)
|
||||
lines = []
|
||||
|
||||
def _format_tool_args(tc: Union[ToolCall, InvalidToolCall]) -> List[str]:
|
||||
def _format_tool_args(tc: Union[ToolCall, InvalidToolCall]) -> list[str]:
|
||||
lines = [
|
||||
f" {tc.get('name', 'Tool')} ({tc.get('id')})",
|
||||
f" Call ID: {tc.get('id')}",
|
||||
@@ -218,11 +218,11 @@ class AIMessageChunk(AIMessage, BaseMessageChunk):
|
||||
"""The type of the message (used for deserialization).
|
||||
Defaults to "AIMessageChunk"."""
|
||||
|
||||
tool_call_chunks: List[ToolCallChunk] = []
|
||||
tool_call_chunks: list[ToolCallChunk] = []
|
||||
"""If provided, tool call chunks associated with the message."""
|
||||
|
||||
@classmethod
|
||||
def get_lc_namespace(cls) -> List[str]:
|
||||
def get_lc_namespace(cls) -> list[str]:
|
||||
"""Get the namespace of the langchain object.
|
||||
|
||||
Returns:
|
||||
@@ -232,7 +232,7 @@ class AIMessageChunk(AIMessage, BaseMessageChunk):
|
||||
return ["langchain", "schema", "messages"]
|
||||
|
||||
@property
|
||||
def lc_attributes(self) -> Dict:
|
||||
def lc_attributes(self) -> dict:
|
||||
"""Attrs to be serialized even if they are derived from other init args."""
|
||||
return {
|
||||
"tool_calls": self.tool_calls,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, cast
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING, Any, Optional, Union, cast
|
||||
|
||||
from pydantic import ConfigDict, Field, field_validator
|
||||
|
||||
@@ -19,7 +20,7 @@ class BaseMessage(Serializable):
|
||||
Messages are the inputs and outputs of ChatModels.
|
||||
"""
|
||||
|
||||
content: Union[str, List[Union[str, Dict]]]
|
||||
content: Union[str, list[Union[str, dict]]]
|
||||
"""The string contents of the message."""
|
||||
|
||||
additional_kwargs: dict = Field(default_factory=dict)
|
||||
@@ -64,7 +65,7 @@ class BaseMessage(Serializable):
|
||||
return id_value
|
||||
|
||||
def __init__(
|
||||
self, content: Union[str, List[Union[str, Dict]]], **kwargs: Any
|
||||
self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
|
||||
) -> None:
|
||||
"""Pass in content as positional arg.
|
||||
|
||||
@@ -85,7 +86,7 @@ class BaseMessage(Serializable):
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def get_lc_namespace(cls) -> List[str]:
|
||||
def get_lc_namespace(cls) -> list[str]:
|
||||
"""Get the namespace of the langchain object.
|
||||
Default is ["langchain", "schema", "messages"].
|
||||
"""
|
||||
@@ -119,9 +120,9 @@ class BaseMessage(Serializable):
|
||||
|
||||
|
||||
def merge_content(
|
||||
first_content: Union[str, List[Union[str, Dict]]],
|
||||
*contents: Union[str, List[Union[str, Dict]]],
|
||||
) -> Union[str, List[Union[str, Dict]]]:
|
||||
first_content: Union[str, list[Union[str, dict]]],
|
||||
*contents: Union[str, list[Union[str, dict]]],
|
||||
) -> Union[str, list[Union[str, dict]]]:
|
||||
"""Merge two message contents.
|
||||
|
||||
Args:
|
||||
@@ -143,7 +144,7 @@ def merge_content(
|
||||
merged = [merged] + content # type: ignore
|
||||
elif isinstance(content, list):
|
||||
# If both are lists
|
||||
merged = merge_lists(cast(List, merged), content) # type: ignore
|
||||
merged = merge_lists(cast(list, merged), content) # type: ignore
|
||||
# If the first content is a list, and the second content is a string
|
||||
else:
|
||||
# If the last element of the first content is a string
|
||||
@@ -163,7 +164,7 @@ class BaseMessageChunk(BaseMessage):
|
||||
"""Message chunk, which can be concatenated with other Message chunks."""
|
||||
|
||||
@classmethod
|
||||
def get_lc_namespace(cls) -> List[str]:
|
||||
def get_lc_namespace(cls) -> list[str]:
|
||||
"""Get the namespace of the langchain object.
|
||||
Default is ["langchain", "schema", "messages"].
|
||||
"""
|
||||
@@ -242,7 +243,7 @@ def message_to_dict(message: BaseMessage) -> dict:
|
||||
return {"type": message.type, "data": message.model_dump()}
|
||||
|
||||
|
||||
def messages_to_dict(messages: Sequence[BaseMessage]) -> List[dict]:
|
||||
def messages_to_dict(messages: Sequence[BaseMessage]) -> list[dict]:
|
||||
"""Convert a sequence of Messages to a list of dictionaries.
|
||||
|
||||
Args:
|
||||
|
||||
--- a/libs/core/langchain_core/messages/chat.py
+++ b/libs/core/langchain_core/messages/chat.py
@@ -1,4 +1,4 @@
-from typing import Any, List, Literal
+from typing import Any, Literal

 from langchain_core.messages.base import (
     BaseMessage,
@@ -18,7 +18,7 @@ class ChatMessage(BaseMessage):
     """The type of the message (used during serialization). Defaults to "chat"."""

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"].
         """
@@ -39,7 +39,7 @@ class ChatMessageChunk(ChatMessage, BaseMessageChunk):
     Defaults to "ChatMessageChunk"."""

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"].
         """
--- a/libs/core/langchain_core/messages/function.py
+++ b/libs/core/langchain_core/messages/function.py
@@ -1,4 +1,4 @@
-from typing import Any, List, Literal
+from typing import Any, Literal

 from langchain_core.messages.base import (
     BaseMessage,
@@ -26,7 +26,7 @@ class FunctionMessage(BaseMessage):
     """The type of the message (used for serialization). Defaults to "function"."""

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"]."""
         return ["langchain", "schema", "messages"]
@@ -46,7 +46,7 @@ class FunctionMessageChunk(FunctionMessage, BaseMessageChunk):
     Defaults to "FunctionMessageChunk"."""

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"]."""
         return ["langchain", "schema", "messages"]
--- a/libs/core/langchain_core/messages/human.py
+++ b/libs/core/langchain_core/messages/human.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Literal, Union
+from typing import Any, Literal, Union

 from langchain_core.messages.base import BaseMessage, BaseMessageChunk

@@ -39,13 +39,13 @@ class HumanMessage(BaseMessage):
     """The type of the message (used for serialization). Defaults to "human"."""

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"]."""
         return ["langchain", "schema", "messages"]

     def __init__(
-        self, content: Union[str, List[Union[str, Dict]]], **kwargs: Any
+        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
     ) -> None:
         """Pass in content as positional arg.

@@ -70,7 +70,7 @@ class HumanMessageChunk(HumanMessage, BaseMessageChunk):
     Defaults to "HumanMessageChunk"."""

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"]."""
         return ["langchain", "schema", "messages"]
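These hunks keep the `__init__` override whose purpose is letting content be passed positionally, either as a plain string or as a list of content blocks, matching the `Union[str, list[Union[str, dict]]]` annotation. A usage sketch; the block shapes are illustrative and not defined by this diff:

from langchain_core.messages import HumanMessage

simple = HumanMessage("What is in this image?")  # content as positional arg
multimodal = HumanMessage(
    content=[
        {"type": "text", "text": "What is in this image?"},
        {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
    ]
)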
--- a/libs/core/langchain_core/messages/modifier.py
+++ b/libs/core/langchain_core/messages/modifier.py
@@ -1,10 +1,8 @@
-from typing import Any, List, Literal
+from typing import Any, Literal

-from langchain_core._api import beta
 from langchain_core.messages.base import BaseMessage


-@beta()
 class RemoveMessage(BaseMessage):
     """Message responsible for deleting other messages."""

@@ -27,7 +25,7 @@ class RemoveMessage(BaseMessage):
         return super().__init__("", id=id, **kwargs)

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"]."""
         return ["langchain", "schema", "messages"]
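`RemoveMessage` carries no content of its own: the `__init__` shown above forces content to "", and only the id matters, which downstream consumers (for example message-state reducers in LangGraph) interpret as "delete the message with this id". A minimal sketch:

from langchain_core.messages import AIMessage, RemoveMessage

existing = AIMessage("hello", id="msg-1")
marker = RemoveMessage(id="msg-1")  # content is forced to ""; only the id is meaningful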
--- a/libs/core/langchain_core/messages/system.py
+++ b/libs/core/langchain_core/messages/system.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Literal, Union
+from typing import Any, Literal, Union

 from langchain_core.messages.base import BaseMessage, BaseMessageChunk

@@ -33,13 +33,13 @@ class SystemMessage(BaseMessage):
     """The type of the message (used for serialization). Defaults to "system"."""

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"]."""
         return ["langchain", "schema", "messages"]

     def __init__(
-        self, content: Union[str, List[Union[str, Dict]]], **kwargs: Any
+        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
     ) -> None:
         """Pass in content as positional arg.

@@ -64,7 +64,7 @@ class SystemMessageChunk(SystemMessage, BaseMessageChunk):
     Defaults to "SystemMessageChunk"."""

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"]."""
         return ["langchain", "schema", "messages"]
--- a/libs/core/langchain_core/messages/tool.py
+++ b/libs/core/langchain_core/messages/tool.py
@@ -1,5 +1,5 @@
 import json
-from typing import Any, Dict, List, Literal, Optional, Tuple, Union
+from typing import Any, Literal, Optional, Union
 from uuid import UUID

 from pydantic import Field, model_validator
@@ -78,7 +78,7 @@ class ToolMessage(BaseMessage):
     """Currently inherited from BaseMessage, but not used."""

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
         Default is ["langchain", "schema", "messages"]."""
         return ["langchain", "schema", "messages"]
@@ -123,7 +123,7 @@ class ToolMessage(BaseMessage):
         return values

     def __init__(
-        self, content: Union[str, List[Union[str, Dict]]], **kwargs: Any
+        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
     ) -> None:
         super().__init__(content=content, **kwargs)

@@ -140,7 +140,7 @@ class ToolMessageChunk(ToolMessage, BaseMessageChunk):
     type: Literal["ToolMessageChunk"] = "ToolMessageChunk"  # type: ignore[assignment]

     @classmethod
-    def get_lc_namespace(cls) -> List[str]:
+    def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object."""
         return ["langchain", "schema", "messages"]

@@ -187,7 +187,7 @@ class ToolCall(TypedDict):

     name: str
     """The name of the tool to be called."""
-    args: Dict[str, Any]
+    args: dict[str, Any]
     """The arguments to the tool call."""
     id: Optional[str]
     """An identifier associated with the tool call.
@@ -198,7 +198,7 @@ class ToolCall(TypedDict):
     type: NotRequired[Literal["tool_call"]]


-def tool_call(*, name: str, args: Dict[str, Any], id: Optional[str]) -> ToolCall:
+def tool_call(*, name: str, args: dict[str, Any], id: Optional[str]) -> ToolCall:
     return ToolCall(name=name, args=args, id=id, type="tool_call")


@@ -276,8 +276,8 @@ def invalid_tool_call(


 def default_tool_parser(
-    raw_tool_calls: List[dict],
-) -> Tuple[List[ToolCall], List[InvalidToolCall]]:
+    raw_tool_calls: list[dict],
+) -> tuple[list[ToolCall], list[InvalidToolCall]]:
     """Best-effort parsing of tools."""
     tool_calls = []
     invalid_tool_calls = []
@@ -306,7 +306,7 @@ def default_tool_parser(
     return tool_calls, invalid_tool_calls


-def default_tool_chunk_parser(raw_tool_calls: List[dict]) -> List[ToolCallChunk]:
+def default_tool_chunk_parser(raw_tool_calls: list[dict]) -> list[ToolCallChunk]:
     """Best-effort parsing of tool chunks."""
     tool_call_chunks = []
     for tool_call in raw_tool_calls:
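`ToolCall` is a TypedDict, and `tool_call()` is the small factory shown above that stamps `type="tool_call"`. A sketch of building one by hand, assuming only the signatures in these hunks (the tool name and id are illustrative):

from langchain_core.messages.tool import ToolCall, tool_call

call: ToolCall = tool_call(
    name="multiply",          # illustrative tool name
    args={"a": 6, "b": 7},    # args is dict[str, Any] after this change
    id="call_abc123",         # illustrative call id
)
assert call["type"] == "tool_call"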
--- a/libs/core/langchain_core/messages/utils.py
+++ b/libs/core/langchain_core/messages/utils.py
@@ -11,26 +11,21 @@ from __future__ import annotations

 import inspect
 import json
+from collections.abc import Iterable, Sequence
 from functools import partial
 from typing import (
     TYPE_CHECKING,
+    Annotated,
     Any,
     Callable,
-    Dict,
-    Iterable,
-    List,
     Literal,
     Optional,
-    Sequence,
-    Tuple,
-    Type,
     Union,
     cast,
     overload,
 )

 from pydantic import Discriminator, Field, Tag
-from typing_extensions import Annotated

 from langchain_core.messages.ai import AIMessage, AIMessageChunk
 from langchain_core.messages.base import BaseMessage, BaseMessageChunk
@@ -166,7 +161,7 @@ def _message_from_dict(message: dict) -> BaseMessage:
     raise ValueError(f"Got unexpected message type: {_type}")


-def messages_from_dict(messages: Sequence[dict]) -> List[BaseMessage]:
+def messages_from_dict(messages: Sequence[dict]) -> list[BaseMessage]:
     """Convert a sequence of messages from dicts to Message objects.

     Args:
@@ -199,7 +194,7 @@ def message_chunk_to_message(chunk: BaseMessageChunk) -> BaseMessage:


 MessageLikeRepresentation = Union[
-    BaseMessage, List[str], Tuple[str, str], str, Dict[str, Any]
+    BaseMessage, list[str], tuple[str, str], str, dict[str, Any]
 ]


@@ -208,7 +203,7 @@ def _create_message_from_message_type(
     content: str,
     name: Optional[str] = None,
     tool_call_id: Optional[str] = None,
-    tool_calls: Optional[List[Dict[str, Any]]] = None,
+    tool_calls: Optional[list[dict[str, Any]]] = None,
     id: Optional[str] = None,
     **additional_kwargs: Any,
 ) -> BaseMessage:
@@ -230,7 +225,7 @@ def _create_message_from_message_type(
         ValueError: if the message type is not one of "human", "user", "ai",
             "assistant", "system", "function", or "tool".
     """
-    kwargs: Dict[str, Any] = {}
+    kwargs: dict[str, Any] = {}
     if name is not None:
         kwargs["name"] = name
     if tool_call_id is not None:
@@ -331,7 +326,7 @@ def _convert_to_message(message: MessageLikeRepresentation) -> BaseMessage:

 def convert_to_messages(
     messages: Union[Iterable[MessageLikeRepresentation], PromptValue],
-) -> List[BaseMessage]:
+) -> list[BaseMessage]:
     """Convert a sequence of messages to a list of messages.

     Args:
@@ -352,18 +347,18 @@ def _runnable_support(func: Callable) -> Callable:
     @overload
     def wrapped(
         messages: Literal[None] = None, **kwargs: Any
-    ) -> Runnable[Sequence[MessageLikeRepresentation], List[BaseMessage]]: ...
+    ) -> Runnable[Sequence[MessageLikeRepresentation], list[BaseMessage]]: ...

     @overload
     def wrapped(
         messages: Sequence[MessageLikeRepresentation], **kwargs: Any
-    ) -> List[BaseMessage]: ...
+    ) -> list[BaseMessage]: ...

     def wrapped(
         messages: Optional[Sequence[MessageLikeRepresentation]] = None, **kwargs: Any
     ) -> Union[
-        List[BaseMessage],
-        Runnable[Sequence[MessageLikeRepresentation], List[BaseMessage]],
+        list[BaseMessage],
+        Runnable[Sequence[MessageLikeRepresentation], list[BaseMessage]],
     ]:
         from langchain_core.runnables.base import RunnableLambda

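`convert_to_messages` accepts anything matching the `MessageLikeRepresentation` alias redefined above: BaseMessage instances, `(role, content)` tuples, bare strings, or dicts. A usage sketch; the exact message texts are illustrative:

from langchain_core.messages import convert_to_messages

messages = convert_to_messages(
    [
        ("system", "You are terse."),
        ("human", "hi"),
        {"role": "ai", "content": "hello!"},
        "a bare string also becomes a HumanMessage",
    ]
)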
@@ -382,11 +377,11 @@ def filter_messages(
     *,
     include_names: Optional[Sequence[str]] = None,
     exclude_names: Optional[Sequence[str]] = None,
-    include_types: Optional[Sequence[Union[str, Type[BaseMessage]]]] = None,
-    exclude_types: Optional[Sequence[Union[str, Type[BaseMessage]]]] = None,
+    include_types: Optional[Sequence[Union[str, type[BaseMessage]]]] = None,
+    exclude_types: Optional[Sequence[Union[str, type[BaseMessage]]]] = None,
     include_ids: Optional[Sequence[str]] = None,
     exclude_ids: Optional[Sequence[str]] = None,
-) -> List[BaseMessage]:
+) -> list[BaseMessage]:
     """Filter messages based on name, type or id.

     Args:
@@ -438,7 +433,7 @@ def filter_messages(
         ]
     """  # noqa: E501
     messages = convert_to_messages(messages)
-    filtered: List[BaseMessage] = []
+    filtered: list[BaseMessage] = []
     for msg in messages:
         if exclude_names and msg.name in exclude_names:
             continue
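Per the signature above, `include_types`/`exclude_types` take either type names such as "human" or message classes, and the loop shown short-circuits on the exclude filters. A usage sketch with illustrative messages:

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, filter_messages

messages = [
    SystemMessage("you are a good assistant", id="1"),
    HumanMessage("what's your name", id="2", name="example_user"),
    AIMessage("steve-o", id="3", name="example_assistant"),
]
kept = filter_messages(
    messages,
    include_types=[HumanMessage, AIMessage],
    exclude_names=["example_user"],
)
# Under these assumptions, only the AIMessage survives both filters.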
@@ -469,7 +464,7 @@ def merge_message_runs(
     messages: Union[Iterable[MessageLikeRepresentation], PromptValue],
     *,
     chunk_separator: str = "\n",
-) -> List[BaseMessage]:
+) -> list[BaseMessage]:
     """Merge consecutive Messages of the same type.

     **NOTE**: ToolMessages are not merged, as each has a distinct tool call id that
@@ -539,7 +534,7 @@ def merge_message_runs(
     if not messages:
         return []
     messages = convert_to_messages(messages)
-    merged: List[BaseMessage] = []
+    merged: list[BaseMessage] = []
     for msg in messages:
         curr = msg.model_copy(deep=True)
         last = merged.pop() if merged else None
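`merge_message_runs` collapses consecutive messages of the same type, joining string contents with `chunk_separator` ("\n" by default); as the note above says, ToolMessages are left alone because each carries its own tool call id. A usage sketch:

from langchain_core.messages import AIMessage, HumanMessage, merge_message_runs

messages = [
    HumanMessage("what's your favorite color"),
    HumanMessage("and your favorite food?"),
    AIMessage("blue; lasagna"),
]
merged = merge_message_runs(messages)
# merged[0] should be a single HumanMessage whose content is
# "what's your favorite color\nand your favorite food?"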
@@ -569,21 +564,21 @@ def trim_messages(
     *,
     max_tokens: int,
     token_counter: Union[
-        Callable[[List[BaseMessage]], int],
+        Callable[[list[BaseMessage]], int],
         Callable[[BaseMessage], int],
         BaseLanguageModel,
     ],
     strategy: Literal["first", "last"] = "last",
     allow_partial: bool = False,
     end_on: Optional[
-        Union[str, Type[BaseMessage], Sequence[Union[str, Type[BaseMessage]]]]
+        Union[str, type[BaseMessage], Sequence[Union[str, type[BaseMessage]]]]
     ] = None,
     start_on: Optional[
-        Union[str, Type[BaseMessage], Sequence[Union[str, Type[BaseMessage]]]]
+        Union[str, type[BaseMessage], Sequence[Union[str, type[BaseMessage]]]]
     ] = None,
     include_system: bool = False,
-    text_splitter: Optional[Union[Callable[[str], List[str]], TextSplitter]] = None,
-) -> List[BaseMessage]:
+    text_splitter: Optional[Union[Callable[[str], list[str]], TextSplitter]] = None,
+) -> list[BaseMessage]:
     """Trim messages to be below a token count.

     Args:
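Per the Union above, `token_counter` may be a callable over the whole message list, a per-message callable, or a chat model. Passing `len` counts every message as one "token", which keeps small examples easy to reason about. A sketch:

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, trim_messages

history = [
    SystemMessage("be terse"),
    HumanMessage("hi"),
    AIMessage("hello"),
    HumanMessage("how are you?"),
]
trimmed = trim_messages(
    history,
    max_tokens=2,
    token_counter=len,    # one "token" per message
    strategy="last",      # keep the most recent messages
    include_system=True,  # always keep the leading SystemMessage
)
# Expected under these assumptions: the SystemMessage plus the last HumanMessage.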
@@ -875,13 +870,13 @@ def _first_max_tokens(
     messages: Sequence[BaseMessage],
     *,
     max_tokens: int,
-    token_counter: Callable[[List[BaseMessage]], int],
-    text_splitter: Callable[[str], List[str]],
+    token_counter: Callable[[list[BaseMessage]], int],
+    text_splitter: Callable[[str], list[str]],
     partial_strategy: Optional[Literal["first", "last"]] = None,
     end_on: Optional[
-        Union[str, Type[BaseMessage], Sequence[Union[str, Type[BaseMessage]]]]
+        Union[str, type[BaseMessage], Sequence[Union[str, type[BaseMessage]]]]
     ] = None,
-) -> List[BaseMessage]:
+) -> list[BaseMessage]:
     messages = list(messages)
     idx = 0
     for i in range(len(messages)):
@@ -949,17 +944,17 @@ def _last_max_tokens(
     messages: Sequence[BaseMessage],
     *,
     max_tokens: int,
-    token_counter: Callable[[List[BaseMessage]], int],
-    text_splitter: Callable[[str], List[str]],
+    token_counter: Callable[[list[BaseMessage]], int],
+    text_splitter: Callable[[str], list[str]],
     allow_partial: bool = False,
     include_system: bool = False,
     start_on: Optional[
-        Union[str, Type[BaseMessage], Sequence[Union[str, Type[BaseMessage]]]]
+        Union[str, type[BaseMessage], Sequence[Union[str, type[BaseMessage]]]]
     ] = None,
     end_on: Optional[
-        Union[str, Type[BaseMessage], Sequence[Union[str, Type[BaseMessage]]]]
+        Union[str, type[BaseMessage], Sequence[Union[str, type[BaseMessage]]]]
     ] = None,
-) -> List[BaseMessage]:
+) -> list[BaseMessage]:
     messages = list(messages)
     if end_on:
         while messages and not _is_message_type(messages[-1], end_on):
@@ -984,7 +979,7 @@ def _last_max_tokens(
     return reversed_[::-1]


-_MSG_CHUNK_MAP: Dict[Type[BaseMessage], Type[BaseMessageChunk]] = {
+_MSG_CHUNK_MAP: dict[type[BaseMessage], type[BaseMessageChunk]] = {
     HumanMessage: HumanMessageChunk,
     AIMessage: AIMessageChunk,
     SystemMessage: SystemMessageChunk,
@@ -1024,14 +1019,14 @@ def _chunk_to_msg(chunk: BaseMessageChunk) -> BaseMessage:
     )


-def _default_text_splitter(text: str) -> List[str]:
+def _default_text_splitter(text: str) -> list[str]:
     splits = text.split("\n")
     return [s + "\n" for s in splits[:-1]] + splits[-1:]


 def _is_message_type(
     message: BaseMessage,
-    type_: Union[str, Type[BaseMessage], Sequence[Union[str, Type[BaseMessage]]]],
+    type_: Union[str, type[BaseMessage], Sequence[Union[str, type[BaseMessage]]]],
 ) -> bool:
     types = [type_] if isinstance(type_, (str, type)) else type_
     types_str = [t for t in types if isinstance(t, str)]
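`_default_text_splitter` above splits on newlines but leaves each newline attached to its piece, so a partially kept message re-joins without losing whitespace. A quick check of that invariant, reusing the function body from the diff:

def _default_text_splitter(text: str) -> list[str]:
    splits = text.split("\n")
    return [s + "\n" for s in splits[:-1]] + splits[-1:]

parts = _default_text_splitter("a\nb\nc")
assert parts == ["a\n", "b\n", "c"]
assert "".join(parts) == "a\nb\nc"  # lossless round-trip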
Some files were not shown because too many files have changed in this diff.