core[lint]: Use 3.9 formatting for docs and tests (#30780)

It looks like `pyupgrade` was already run here but missed some docs and
tests.

This helps keep our docs looking professional and up to date.
Eventually, we should lint and format our inline docs as well.
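
For reference, the target style is PEP 585 built-in generics (`dict[...]`, `list[...]`, `tuple[...]`), which `pyupgrade --py39-plus` and ruff's `UP` rules produce on Python 3.9+. A minimal before/after sketch, using a simplified signature rather than the library's actual callback API:

```python
from typing import Any, Optional

# Before (pre-3.9 spellings imported from typing), roughly:
#
#     from typing import Dict, List
#
#     def on_chain_end(outputs: Dict[str, Any], tags: Optional[List[str]] = None) -> None:
#         ...

# After (PEP 585 built-in generics; Optional still comes from typing):
def on_chain_end(outputs: dict[str, Any], tags: Optional[list[str]] = None) -> None:
    """Run when a chain ends running.

    Args:
        outputs (dict[str, Any]): The outputs of the chain.
        tags (Optional[list[str]]): The tags.
    """
```

Only the spellings in annotations and docstrings change; `Optional`/`Union` are left alone in this pass (moving those to `X | None` is a separate PEP 604 rewrite).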
Sydney Runkle, 2025-04-11 10:39:25 -04:00, committed by GitHub
parent 48affc498b
commit fdc2b4bcac
36 changed files with 222 additions and 232 deletions

View File

@ -131,7 +131,7 @@ class ChainManagerMixin:
"""Run when chain ends running.
Args:
outputs (Dict[str, Any]): The outputs of the chain.
outputs (dict[str, Any]): The outputs of the chain.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
kwargs (Any): Additional keyword arguments.
@ -248,12 +248,12 @@ class CallbackManagerMixin:
you should use on_chat_model_start instead.
Args:
serialized (Dict[str, Any]): The serialized LLM.
prompts (List[str]): The prompts.
serialized (dict[str, Any]): The serialized LLM.
prompts (list[str]): The prompts.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
kwargs (Any): Additional keyword arguments.
"""
@ -274,12 +274,12 @@ class CallbackManagerMixin:
a handler for a non-chat model, you should use on_llm_start instead.
Args:
serialized (Dict[str, Any]): The serialized chat model.
messages (List[List[BaseMessage]]): The messages.
serialized (dict[str, Any]): The serialized chat model.
messages (list[list[BaseMessage]]): The messages.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
kwargs (Any): Additional keyword arguments.
"""
# NotImplementedError is thrown intentionally
@ -301,12 +301,12 @@ class CallbackManagerMixin:
"""Run when the Retriever starts running.
Args:
serialized (Dict[str, Any]): The serialized Retriever.
serialized (dict[str, Any]): The serialized Retriever.
query (str): The query.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
kwargs (Any): Additional keyword arguments.
"""
@ -324,12 +324,12 @@ class CallbackManagerMixin:
"""Run when a chain starts running.
Args:
serialized (Dict[str, Any]): The serialized chain.
inputs (Dict[str, Any]): The inputs.
serialized (dict[str, Any]): The serialized chain.
inputs (dict[str, Any]): The inputs.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
kwargs (Any): Additional keyword arguments.
"""
@ -348,13 +348,13 @@ class CallbackManagerMixin:
"""Run when the tool starts running.
Args:
serialized (Dict[str, Any]): The serialized tool.
serialized (dict[str, Any]): The serialized tool.
input_str (str): The input string.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
inputs (Optional[Dict[str, Any]]): The inputs.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
inputs (Optional[dict[str, Any]]): The inputs.
kwargs (Any): Additional keyword arguments.
"""
@ -495,12 +495,12 @@ class AsyncCallbackHandler(BaseCallbackHandler):
you should use on_chat_model_start instead.
Args:
serialized (Dict[str, Any]): The serialized LLM.
prompts (List[str]): The prompts.
serialized (dict[str, Any]): The serialized LLM.
prompts (list[str]): The prompts.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
kwargs (Any): Additional keyword arguments.
"""
@ -521,12 +521,12 @@ class AsyncCallbackHandler(BaseCallbackHandler):
a handler for a non-chat model, you should use on_llm_start instead.
Args:
serialized (Dict[str, Any]): The serialized chat model.
messages (List[List[BaseMessage]]): The messages.
serialized (dict[str, Any]): The serialized chat model.
messages (list[list[BaseMessage]]): The messages.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
kwargs (Any): Additional keyword arguments.
"""
# NotImplementedError is thrown intentionally
@ -552,7 +552,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
containing content and other information.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -571,7 +571,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
response (LLMResult): The response which was generated.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -610,12 +610,12 @@ class AsyncCallbackHandler(BaseCallbackHandler):
"""Run when a chain starts running.
Args:
serialized (Dict[str, Any]): The serialized chain.
inputs (Dict[str, Any]): The inputs.
serialized (dict[str, Any]): The serialized chain.
inputs (dict[str, Any]): The inputs.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
kwargs (Any): Additional keyword arguments.
"""
@ -631,10 +631,10 @@ class AsyncCallbackHandler(BaseCallbackHandler):
"""Run when a chain ends running.
Args:
outputs (Dict[str, Any]): The outputs of the chain.
outputs (dict[str, Any]): The outputs of the chain.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -653,7 +653,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
error (BaseException): The error that occurred.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -672,13 +672,13 @@ class AsyncCallbackHandler(BaseCallbackHandler):
"""Run when the tool starts running.
Args:
serialized (Dict[str, Any]): The serialized tool.
serialized (dict[str, Any]): The serialized tool.
input_str (str): The input string.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
inputs (Optional[Dict[str, Any]]): The inputs.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
inputs (Optional[dict[str, Any]]): The inputs.
kwargs (Any): Additional keyword arguments.
"""
@ -697,7 +697,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
output (Any): The output of the tool.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -716,7 +716,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
error (BaseException): The error that occurred.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -735,7 +735,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
text (str): The text.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -771,7 +771,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
action (AgentAction): The agent action.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -790,7 +790,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
finish (AgentFinish): The agent finish.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -808,12 +808,12 @@ class AsyncCallbackHandler(BaseCallbackHandler):
"""Run on the retriever start.
Args:
serialized (Dict[str, Any]): The serialized retriever.
serialized (dict[str, Any]): The serialized retriever.
query (str): The query.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
metadata (Optional[Dict[str, Any]]): The metadata.
tags (Optional[list[str]]): The tags.
metadata (Optional[dict[str, Any]]): The metadata.
kwargs (Any): Additional keyword arguments.
"""
@ -832,7 +832,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
documents (Sequence[Document]): The documents retrieved.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -851,7 +851,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
error (BaseException): The error that occurred.
run_id (UUID): The run ID. This is the ID of the current run.
parent_run_id (UUID): The parent run ID. This is the ID of the parent run.
tags (Optional[List[str]]): The tags.
tags (Optional[list[str]]): The tags.
kwargs (Any): Additional keyword arguments.
"""
@ -898,15 +898,15 @@ class BaseCallbackManager(CallbackManagerMixin):
"""Initialize callback manager.
Args:
handlers (List[BaseCallbackHandler]): The handlers.
inheritable_handlers (Optional[List[BaseCallbackHandler]]):
handlers (list[BaseCallbackHandler]): The handlers.
inheritable_handlers (Optional[list[BaseCallbackHandler]]):
The inheritable handlers. Default is None.
parent_run_id (Optional[UUID]): The parent run ID. Default is None.
tags (Optional[List[str]]): The tags. Default is None.
inheritable_tags (Optional[List[str]]): The inheritable tags.
tags (Optional[list[str]]): The tags. Default is None.
inheritable_tags (Optional[list[str]]): The inheritable tags.
Default is None.
metadata (Optional[Dict[str, Any]]): The metadata. Default is None.
inheritable_metadata (Optional[Dict[str, Any]]): The inheritable metadata.
metadata (Optional[dict[str, Any]]): The metadata. Default is None.
inheritable_metadata (Optional[dict[str, Any]]): The inheritable metadata.
Default is None.
"""
self.handlers: list[BaseCallbackHandler] = handlers
@ -1017,7 +1017,7 @@ class BaseCallbackManager(CallbackManagerMixin):
"""Set handlers as the only handlers on the callback manager.
Args:
handlers (List[BaseCallbackHandler]): The handlers to set.
handlers (list[BaseCallbackHandler]): The handlers to set.
inherit (bool): Whether to inherit the handlers. Default is True.
"""
self.handlers = []
@ -1038,7 +1038,7 @@ class BaseCallbackManager(CallbackManagerMixin):
"""Add tags to the callback manager.
Args:
tags (List[str]): The tags to add.
tags (list[str]): The tags to add.
inherit (bool): Whether to inherit the tags. Default is True.
"""
for tag in tags:
@ -1052,7 +1052,7 @@ class BaseCallbackManager(CallbackManagerMixin):
"""Remove tags from the callback manager.
Args:
tags (List[str]): The tags to remove.
tags (list[str]): The tags to remove.
"""
for tag in tags:
self.tags.remove(tag)
@ -1062,7 +1062,7 @@ class BaseCallbackManager(CallbackManagerMixin):
"""Add metadata to the callback manager.
Args:
metadata (Dict[str, Any]): The metadata to add.
metadata (dict[str, Any]): The metadata to add.
inherit (bool): Whether to inherit the metadata. Default is True.
"""
self.metadata.update(metadata)
@ -1073,7 +1073,7 @@ class BaseCallbackManager(CallbackManagerMixin):
"""Remove metadata from the callback manager.
Args:
keys (List[str]): The keys to remove.
keys (list[str]): The keys to remove.
"""
for key in keys:
self.metadata.pop(key)

View File

@ -47,8 +47,8 @@ class FileCallbackHandler(BaseCallbackHandler):
"""Print out that we are entering a chain.
Args:
serialized (Dict[str, Any]): The serialized chain.
inputs (Dict[str, Any]): The inputs to the chain.
serialized (dict[str, Any]): The serialized chain.
inputs (dict[str, Any]): The inputs to the chain.
**kwargs (Any): Additional keyword arguments.
"""
if "name" in kwargs:
@ -68,7 +68,7 @@ class FileCallbackHandler(BaseCallbackHandler):
"""Print out that we finished a chain.
Args:
outputs (Dict[str, Any]): The outputs of the chain.
outputs (dict[str, Any]): The outputs of the chain.
**kwargs (Any): Additional keyword arguments.
"""
print_text("\n\033[1m> Finished chain.\033[0m", end="\n", file=self.file)

View File

@ -79,16 +79,16 @@ def trace_as_chain_group(
group_name (str): The name of the chain group.
callback_manager (CallbackManager, optional): The callback manager to use.
Defaults to None.
inputs (Dict[str, Any], optional): The inputs to the chain group.
inputs (dict[str, Any], optional): The inputs to the chain group.
Defaults to None.
project_name (str, optional): The name of the project.
Defaults to None.
example_id (str or UUID, optional): The ID of the example.
Defaults to None.
run_id (UUID, optional): The ID of the run.
tags (List[str], optional): The inheritable tags to apply to all runs.
tags (list[str], optional): The inheritable tags to apply to all runs.
Defaults to None.
metadata (Dict[str, Any], optional): The metadata to apply to all runs.
metadata (dict[str, Any], optional): The metadata to apply to all runs.
Defaults to None.
Note: must have LANGCHAIN_TRACING_V2 env var set to true to see the trace in LangSmith.
@ -160,16 +160,16 @@ async def atrace_as_chain_group(
group_name (str): The name of the chain group.
callback_manager (AsyncCallbackManager, optional): The async callback manager to use,
which manages tracing and other callback behavior. Defaults to None.
inputs (Dict[str, Any], optional): The inputs to the chain group.
inputs (dict[str, Any], optional): The inputs to the chain group.
Defaults to None.
project_name (str, optional): The name of the project.
Defaults to None.
example_id (str or UUID, optional): The ID of the example.
Defaults to None.
run_id (UUID, optional): The ID of the run.
tags (List[str], optional): The inheritable tags to apply to all runs.
tags (list[str], optional): The inheritable tags to apply to all runs.
Defaults to None.
metadata (Dict[str, Any], optional): The metadata to apply to all runs.
metadata (dict[str, Any], optional): The metadata to apply to all runs.
Defaults to None.
Returns:
@ -461,17 +461,17 @@ class BaseRunManager(RunManagerMixin):
Args:
run_id (UUID): The ID of the run.
handlers (List[BaseCallbackHandler]): The list of handlers.
inheritable_handlers (List[BaseCallbackHandler]):
handlers (list[BaseCallbackHandler]): The list of handlers.
inheritable_handlers (list[BaseCallbackHandler]):
The list of inheritable handlers.
parent_run_id (UUID, optional): The ID of the parent run.
Defaults to None.
tags (Optional[List[str]]): The list of tags. Defaults to None.
inheritable_tags (Optional[List[str]]): The list of inheritable tags.
tags (Optional[list[str]]): The list of tags. Defaults to None.
inheritable_tags (Optional[list[str]]): The list of inheritable tags.
Defaults to None.
metadata (Optional[Dict[str, Any]]): The metadata.
metadata (Optional[dict[str, Any]]): The metadata.
Defaults to None.
inheritable_metadata (Optional[Dict[str, Any]]): The inheritable metadata.
inheritable_metadata (Optional[dict[str, Any]]): The inheritable metadata.
Defaults to None.
"""
self.run_id = run_id
@ -831,7 +831,7 @@ class CallbackManagerForChainRun(ParentRunManager, ChainManagerMixin):
"""Run when chain ends running.
Args:
outputs (Union[Dict[str, Any], Any]): The outputs of the chain.
outputs (Union[dict[str, Any], Any]): The outputs of the chain.
**kwargs (Any): Additional keyword arguments.
"""
handle_event(
@ -937,7 +937,7 @@ class AsyncCallbackManagerForChainRun(AsyncParentRunManager, ChainManagerMixin):
"""Run when a chain ends running.
Args:
outputs (Union[Dict[str, Any], Any]): The outputs of the chain.
outputs (Union[dict[str, Any], Any]): The outputs of the chain.
**kwargs (Any): Additional keyword arguments.
"""
await ahandle_event(
@ -1259,13 +1259,13 @@ class CallbackManager(BaseCallbackManager):
"""Run when LLM starts running.
Args:
serialized (Dict[str, Any]): The serialized LLM.
prompts (List[str]): The list of prompts.
serialized (dict[str, Any]): The serialized LLM.
prompts (list[str]): The list of prompts.
run_id (UUID, optional): The ID of the run. Defaults to None.
**kwargs (Any): Additional keyword arguments.
Returns:
List[CallbackManagerForLLMRun]: A callback manager for each
list[CallbackManagerForLLMRun]: A callback manager for each
prompt as an LLM run.
"""
managers = []
@ -1310,13 +1310,13 @@ class CallbackManager(BaseCallbackManager):
"""Run when chat model starts running.
Args:
serialized (Dict[str, Any]): The serialized LLM.
messages (List[List[BaseMessage]]): The list of messages.
serialized (dict[str, Any]): The serialized LLM.
messages (list[list[BaseMessage]]): The list of messages.
run_id (UUID, optional): The ID of the run. Defaults to None.
**kwargs (Any): Additional keyword arguments.
Returns:
List[CallbackManagerForLLMRun]: A callback manager for each
list[CallbackManagerForLLMRun]: A callback manager for each
list of messages as an LLM run.
"""
managers = []
@ -1364,8 +1364,8 @@ class CallbackManager(BaseCallbackManager):
"""Run when chain starts running.
Args:
serialized (Optional[Dict[str, Any]]): The serialized chain.
inputs (Union[Dict[str, Any], Any]): The inputs to the chain.
serialized (Optional[dict[str, Any]]): The serialized chain.
inputs (Union[dict[str, Any], Any]): The inputs to the chain.
run_id (UUID, optional): The ID of the run. Defaults to None.
**kwargs (Any): Additional keyword arguments.
@ -1466,7 +1466,7 @@ class CallbackManager(BaseCallbackManager):
"""Run when the retriever starts running.
Args:
serialized (Optional[Dict[str, Any]]): The serialized retriever.
serialized (Optional[dict[str, Any]]): The serialized retriever.
query (str): The query.
run_id (UUID, optional): The ID of the run. Defaults to None.
parent_run_id (UUID, optional): The ID of the parent run. Defaults to None.
@ -1559,13 +1559,13 @@ class CallbackManager(BaseCallbackManager):
local_callbacks (Optional[Callbacks], optional): The local callbacks.
Defaults to None.
verbose (bool, optional): Whether to enable verbose mode. Defaults to False.
inheritable_tags (Optional[List[str]], optional): The inheritable tags.
inheritable_tags (Optional[list[str]], optional): The inheritable tags.
Defaults to None.
local_tags (Optional[List[str]], optional): The local tags.
local_tags (Optional[list[str]], optional): The local tags.
Defaults to None.
inheritable_metadata (Optional[Dict[str, Any]], optional): The inheritable
inheritable_metadata (Optional[dict[str, Any]], optional): The inheritable
metadata. Defaults to None.
local_metadata (Optional[Dict[str, Any]], optional): The local metadata.
local_metadata (Optional[dict[str, Any]], optional): The local metadata.
Defaults to None.
Returns:
@ -1598,8 +1598,8 @@ class CallbackManagerForChainGroup(CallbackManager):
"""Initialize the callback manager.
Args:
handlers (List[BaseCallbackHandler]): The list of handlers.
inheritable_handlers (Optional[List[BaseCallbackHandler]]): The list of
handlers (list[BaseCallbackHandler]): The list of handlers.
inheritable_handlers (Optional[list[BaseCallbackHandler]]): The list of
inheritable handlers. Defaults to None.
parent_run_id (Optional[UUID]): The ID of the parent run. Defaults to None.
parent_run_manager (CallbackManagerForChainRun): The parent run manager.
@ -1690,7 +1690,7 @@ class CallbackManagerForChainGroup(CallbackManager):
"""Run when traced chain group ends.
Args:
outputs (Union[Dict[str, Any], Any]): The outputs of the chain.
outputs (Union[dict[str, Any], Any]): The outputs of the chain.
**kwargs (Any): Additional keyword arguments.
"""
self.ended = True
@ -1729,13 +1729,13 @@ class AsyncCallbackManager(BaseCallbackManager):
"""Run when LLM starts running.
Args:
serialized (Dict[str, Any]): The serialized LLM.
prompts (List[str]): The list of prompts.
serialized (dict[str, Any]): The serialized LLM.
prompts (list[str]): The list of prompts.
run_id (UUID, optional): The ID of the run. Defaults to None.
**kwargs (Any): Additional keyword arguments.
Returns:
List[AsyncCallbackManagerForLLMRun]: The list of async
list[AsyncCallbackManagerForLLMRun]: The list of async
callback managers, one for each LLM Run corresponding
to each prompt.
"""
@ -1818,13 +1818,13 @@ class AsyncCallbackManager(BaseCallbackManager):
"""Async run when LLM starts running.
Args:
serialized (Dict[str, Any]): The serialized LLM.
messages (List[List[BaseMessage]]): The list of messages.
serialized (dict[str, Any]): The serialized LLM.
messages (list[list[BaseMessage]]): The list of messages.
run_id (UUID, optional): The ID of the run. Defaults to None.
**kwargs (Any): Additional keyword arguments.
Returns:
List[AsyncCallbackManagerForLLMRun]: The list of
list[AsyncCallbackManagerForLLMRun]: The list of
async callback managers, one for each LLM Run
corresponding to each inner message list.
"""
@ -1890,8 +1890,8 @@ class AsyncCallbackManager(BaseCallbackManager):
"""Async run when chain starts running.
Args:
serialized (Optional[Dict[str, Any]]): The serialized chain.
inputs (Union[Dict[str, Any], Any]): The inputs to the chain.
serialized (Optional[dict[str, Any]]): The serialized chain.
inputs (Union[dict[str, Any], Any]): The inputs to the chain.
run_id (UUID, optional): The ID of the run. Defaults to None.
**kwargs (Any): Additional keyword arguments.
@ -1938,7 +1938,7 @@ class AsyncCallbackManager(BaseCallbackManager):
"""Run when the tool starts running.
Args:
serialized (Optional[Dict[str, Any]]): The serialized tool.
serialized (Optional[dict[str, Any]]): The serialized tool.
input_str (str): The input to the tool.
run_id (UUID, optional): The ID of the run. Defaults to None.
parent_run_id (UUID, optional): The ID of the parent run.
@ -2029,7 +2029,7 @@ class AsyncCallbackManager(BaseCallbackManager):
"""Run when the retriever starts running.
Args:
serialized (Optional[Dict[str, Any]]): The serialized retriever.
serialized (Optional[dict[str, Any]]): The serialized retriever.
query (str): The query.
run_id (UUID, optional): The ID of the run. Defaults to None.
parent_run_id (UUID, optional): The ID of the parent run. Defaults to None.
@ -2085,13 +2085,13 @@ class AsyncCallbackManager(BaseCallbackManager):
local_callbacks (Optional[Callbacks], optional): The local callbacks.
Defaults to None.
verbose (bool, optional): Whether to enable verbose mode. Defaults to False.
inheritable_tags (Optional[List[str]], optional): The inheritable tags.
inheritable_tags (Optional[list[str]], optional): The inheritable tags.
Defaults to None.
local_tags (Optional[List[str]], optional): The local tags.
local_tags (Optional[list[str]], optional): The local tags.
Defaults to None.
inheritable_metadata (Optional[Dict[str, Any]], optional): The inheritable
inheritable_metadata (Optional[dict[str, Any]], optional): The inheritable
metadata. Defaults to None.
local_metadata (Optional[Dict[str, Any]], optional): The local metadata.
local_metadata (Optional[dict[str, Any]], optional): The local metadata.
Defaults to None.
Returns:
@ -2124,8 +2124,8 @@ class AsyncCallbackManagerForChainGroup(AsyncCallbackManager):
"""Initialize the async callback manager.
Args:
handlers (List[BaseCallbackHandler]): The list of handlers.
inheritable_handlers (Optional[List[BaseCallbackHandler]]): The list of
handlers (list[BaseCallbackHandler]): The list of handlers.
inheritable_handlers (Optional[list[BaseCallbackHandler]]): The list of
inheritable handlers. Defaults to None.
parent_run_id (Optional[UUID]): The ID of the parent run. Defaults to None.
parent_run_manager (AsyncCallbackManagerForChainRun):
@ -2219,7 +2219,7 @@ class AsyncCallbackManagerForChainGroup(AsyncCallbackManager):
"""Run when traced chain group ends.
Args:
outputs (Union[Dict[str, Any], Any]): The outputs of the chain.
outputs (Union[dict[str, Any], Any]): The outputs of the chain.
**kwargs (Any): Additional keyword arguments.
"""
self.ended = True
@ -2263,12 +2263,12 @@ def _configure(
local_callbacks (Optional[Callbacks], optional): The local callbacks.
Defaults to None.
verbose (bool, optional): Whether to enable verbose mode. Defaults to False.
inheritable_tags (Optional[List[str]], optional): The inheritable tags.
inheritable_tags (Optional[list[str]], optional): The inheritable tags.
Defaults to None.
local_tags (Optional[List[str]], optional): The local tags. Defaults to None.
inheritable_metadata (Optional[Dict[str, Any]], optional): The inheritable
local_tags (Optional[list[str]], optional): The local tags. Defaults to None.
inheritable_metadata (Optional[dict[str, Any]], optional): The inheritable
metadata. Defaults to None.
local_metadata (Optional[Dict[str, Any]], optional): The local metadata.
local_metadata (Optional[dict[str, Any]], optional): The local metadata.
Defaults to None.
Returns:
@ -2460,8 +2460,8 @@ async def adispatch_custom_event(
data: Any,
*,
run_id: UUID,
tags: Optional[List[str]] = None,
metadata: Optional[Dict[str, Any]] = None,
tags: Optional[list[str]] = None,
metadata: Optional[dict[str, Any]] = None,
**kwargs: Any,
) -> None:
print(f"Received custom event: {name} with data: {data}")
@ -2492,8 +2492,8 @@ async def adispatch_custom_event(
data: Any,
*,
run_id: UUID,
tags: Optional[List[str]] = None,
metadata: Optional[Dict[str, Any]] = None,
tags: Optional[list[str]] = None,
metadata: Optional[dict[str, Any]] = None,
**kwargs: Any,
) -> None:
print(f"Received custom event: {name} with data: {data}")
@ -2578,8 +2578,8 @@ def dispatch_custom_event(
data: Any,
*,
run_id: UUID,
tags: Optional[List[str]] = None,
metadata: Optional[Dict[str, Any]] = None,
tags: Optional[list[str]] = None,
metadata: Optional[dict[str, Any]] = None,
**kwargs: Any,
) -> None:
print(f"Received custom event: {name} with data: {data}")

View File

@ -31,8 +31,8 @@ class StdOutCallbackHandler(BaseCallbackHandler):
"""Print out that we are entering a chain.
Args:
serialized (Dict[str, Any]): The serialized chain.
inputs (Dict[str, Any]): The inputs to the chain.
serialized (dict[str, Any]): The serialized chain.
inputs (dict[str, Any]): The inputs to the chain.
**kwargs (Any): Additional keyword arguments.
"""
if "name" in kwargs:
@ -48,7 +48,7 @@ class StdOutCallbackHandler(BaseCallbackHandler):
"""Print out that we finished a chain.
Args:
outputs (Dict[str, Any]): The outputs of the chain.
outputs (dict[str, Any]): The outputs of the chain.
**kwargs (Any): Additional keyword arguments.
"""
print("\n\033[1m> Finished chain.\033[0m") # noqa: T201

View File

@ -24,8 +24,8 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
"""Run when LLM starts running.
Args:
serialized (Dict[str, Any]): The serialized LLM.
prompts (List[str]): The prompts to run.
serialized (dict[str, Any]): The serialized LLM.
prompts (list[str]): The prompts to run.
**kwargs (Any): Additional keyword arguments.
"""
@ -38,8 +38,8 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
"""Run when LLM starts running.
Args:
serialized (Dict[str, Any]): The serialized LLM.
messages (List[List[BaseMessage]]): The messages to run.
serialized (dict[str, Any]): The serialized LLM.
messages (list[list[BaseMessage]]): The messages to run.
**kwargs (Any): Additional keyword arguments.
"""
@ -76,8 +76,8 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
"""Run when a chain starts running.
Args:
serialized (Dict[str, Any]): The serialized chain.
inputs (Dict[str, Any]): The inputs to the chain.
serialized (dict[str, Any]): The serialized chain.
inputs (dict[str, Any]): The inputs to the chain.
**kwargs (Any): Additional keyword arguments.
"""
@ -85,7 +85,7 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
"""Run when a chain ends running.
Args:
outputs (Dict[str, Any]): The outputs of the chain.
outputs (dict[str, Any]): The outputs of the chain.
**kwargs (Any): Additional keyword arguments.
"""
@ -103,7 +103,7 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
"""Run when the tool starts running.
Args:
serialized (Dict[str, Any]): The serialized tool.
serialized (dict[str, Any]): The serialized tool.
input_str (str): The input string.
**kwargs (Any): Additional keyword arguments.
"""

View File

@ -630,7 +630,7 @@ class DocumentIndex(BaseRetriever):
kwargs: Additional keyword arguments. These are up to the implementation.
Returns:
List[Document]: List of documents that were found.
list[Document]: List of documents that were found.
"""
async def aget(
@ -656,7 +656,7 @@ class DocumentIndex(BaseRetriever):
kwargs: Additional keyword arguments. These are up to the implementation.
Returns:
List[Document]: List of documents that were found.
list[Document]: List of documents that were found.
"""
return await run_in_executor(
None,

View File

@ -171,7 +171,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| Method | Input | Output | Description |
+===========================+================================================================+=====================================================================+==================================================================================================+
| `invoke` | str | List[dict | tuple | BaseMessage] | PromptValue | BaseMessage | A single chat model call. |
| `invoke` | str | list[dict | tuple | BaseMessage] | PromptValue | BaseMessage | A single chat model call. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `ainvoke` | ''' | BaseMessage | Defaults to running invoke in an async executor. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
@ -181,13 +181,13 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `astream_events` | ''' | AsyncIterator[StreamEvent] | Event types: 'on_chat_model_start', 'on_chat_model_stream', 'on_chat_model_end'. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `batch` | List['''] | List[BaseMessage] | Defaults to running invoke in concurrent threads. |
| `batch` | list['''] | list[BaseMessage] | Defaults to running invoke in concurrent threads. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `abatch` | List['''] | List[BaseMessage] | Defaults to running ainvoke in concurrent threads. |
| `abatch` | list['''] | list[BaseMessage] | Defaults to running ainvoke in concurrent threads. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `batch_as_completed` | List['''] | Iterator[Tuple[int, Union[BaseMessage, Exception]]] | Defaults to running invoke in concurrent threads. |
| `batch_as_completed` | list['''] | Iterator[tuple[int, Union[BaseMessage, Exception]]] | Defaults to running invoke in concurrent threads. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `abatch_as_completed` | List['''] | AsyncIterator[Tuple[int, Union[BaseMessage, Exception]]] | Defaults to running ainvoke in concurrent threads. |
| `abatch_as_completed` | list['''] | AsyncIterator[tuple[int, Union[BaseMessage, Exception]]] | Defaults to running ainvoke in concurrent threads. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
This table provides a brief overview of the main imperative methods. Please see the base Runnable reference for full documentation.

View File

@ -857,7 +857,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
"""
if not isinstance(prompts, list):
msg = (
"Argument 'prompts' is expected to be of type List[str], received"
"Argument 'prompts' is expected to be of type list[str], received"
f" argument of type {type(prompts)}."
)
raise ValueError(msg) # noqa: TRY004

View File

@ -39,16 +39,16 @@ class BaseMemory(Serializable, ABC):
.. code-block:: python
class SimpleMemory(BaseMemory):
memories: Dict[str, Any] = dict()
memories: dict[str, Any] = dict()
@property
def memory_variables(self) -> List[str]:
def memory_variables(self) -> list[str]:
return list(self.memories.keys())
def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, str]:
def load_memory_variables(self, inputs: dict[str, Any]) -> dict[str, str]:
return self.memories
def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None:
def save_context(self, inputs: dict[str, Any], outputs: dict[str, str]) -> None:
pass
def clear(self) -> None:

View File

@ -734,8 +734,6 @@ def trim_messages(
.. code-block:: python
from typing import list
from langchain_core.messages import (
AIMessage,
HumanMessage,

View File

@ -31,8 +31,8 @@ class LLMResult(BaseModel):
The second dimension of the list represents different candidate generations
for a given prompt.
When returned from an LLM the type is List[List[Generation]].
When returned from a chat model the type is List[List[ChatGeneration]].
When returned from an LLM the type is list[list[Generation]].
When returned from a chat model the type is list[list[ChatGeneration]].
ChatGeneration is a subclass of Generation that has a field for a structured
chat message.
@ -56,7 +56,7 @@ class LLMResult(BaseModel):
def flatten(self) -> list[LLMResult]:
"""Flatten generations into a single list.
Unpack List[List[Generation]] -> List[LLMResult] where each returned LLMResult
Unpack list[list[Generation]] -> list[LLMResult] where each returned LLMResult
contains only a single Generation. If token usage information is available,
it is kept only for the LLMResult corresponding to the top-choice
Generation, to avoid over-counting of token usage downstream.

View File

@ -6,9 +6,7 @@ import abc
import asyncio
import threading
import time
from typing import (
Optional,
)
from typing import Optional
class BaseRateLimiter(abc.ABC):

View File

@ -90,17 +90,16 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever
from typing import List
class SimpleRetriever(BaseRetriever):
docs: List[Document]
docs: list[Document]
k: int = 5
def _get_relevant_documents(self, query: str) -> List[Document]:
def _get_relevant_documents(self, query: str) -> list[Document]:
\"\"\"Return the first k documents from the list of documents\"\"\"
return self.docs[:self.k]
async def _aget_relevant_documents(self, query: str) -> List[Document]:
async def _aget_relevant_documents(self, query: str) -> list[Document]:
\"\"\"(Optional) async native implementation.\"\"\"
return self.docs[:self.k]
@ -112,14 +111,14 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
class TFIDFRetriever(BaseRetriever, BaseModel):
vectorizer: Any
docs: List[Document]
docs: list[Document]
tfidf_array: Any
k: int = 4
class Config:
arbitrary_types_allowed = True
def _get_relevant_documents(self, query: str) -> List[Document]:
def _get_relevant_documents(self, query: str) -> list[Document]:
# Ip -- (n_docs,x), Op -- (n_docs,n_Feats)
query_vec = self.vectorizer.transform([query])
# Op -- (n_docs,1) -- Cosine Sim with each doc

View File

@ -1159,16 +1159,16 @@ class Runnable(Generic[Input, Output], ABC):
the Runnable that emitted the event.
A child Runnable that gets invoked as part of the execution of a
parent Runnable is assigned its own unique ID.
- ``parent_ids``: **List[str]** - The IDs of the parent runnables that
- ``parent_ids``: **list[str]** - The IDs of the parent runnables that
generated the event. The root Runnable will have an empty list.
The order of the parent IDs is from the root to the immediate parent.
Only available for v2 version of the API. The v1 version of the API
will return an empty list.
- ``tags``: **Optional[List[str]]** - The tags of the Runnable that generated
- ``tags``: **Optional[list[str]]** - The tags of the Runnable that generated
the event.
- ``metadata``: **Optional[Dict[str, Any]]** - The metadata of the Runnable
- ``metadata``: **Optional[dict[str, Any]]** - The metadata of the Runnable
that generated the event.
- ``data``: **Dict[str, Any]**
- ``data``: **dict[str, Any]**
Below is a table that illustrates some events that might be emitted by various
@ -1231,7 +1231,7 @@ class Runnable(Generic[Input, Output], ABC):
.. code-block:: python
def format_docs(docs: List[Document]) -> str:
def format_docs(docs: list[Document]) -> str:
'''Format the docs.'''
return ", ".join([doc.page_content for doc in docs])
@ -2378,13 +2378,12 @@ class Runnable(Generic[Input, Output], ABC):
.. code-block:: python
from typing import List
from typing_extensions import TypedDict
from langchain_core.runnables import RunnableLambda
class Args(TypedDict):
a: int
b: List[int]
b: list[int]
def f(x: Args) -> str:
return str(x["a"] * max(x["b"]))
@ -2397,18 +2396,18 @@ class Runnable(Generic[Input, Output], ABC):
.. code-block:: python
from typing import Any, Dict, List
from typing import Any
from pydantic import BaseModel, Field
from langchain_core.runnables import RunnableLambda
def f(x: Dict[str, Any]) -> str:
def f(x: dict[str, Any]) -> str:
return str(x["a"] * max(x["b"]))
class FSchema(BaseModel):
\"\"\"Apply a function to an integer and list of integers.\"\"\"
a: int = Field(..., description="Integer")
b: List[int] = Field(..., description="List of ints")
b: list[int] = Field(..., description="List of ints")
runnable = RunnableLambda(f)
as_tool = runnable.as_tool(FSchema)
@ -2418,14 +2417,14 @@ class Runnable(Generic[Input, Output], ABC):
.. code-block:: python
from typing import Any, Dict, List
from typing import Any
from langchain_core.runnables import RunnableLambda
def f(x: Dict[str, Any]) -> str:
def f(x: dict[str, Any]) -> str:
return str(x["a"] * max(x["b"]))
runnable = RunnableLambda(f)
as_tool = runnable.as_tool(arg_types={"a": int, "b": List[int]})
as_tool = runnable.as_tool(arg_types={"a": int, "b": list[int]})
as_tool.invoke({"a": 3, "b": [1, 2]})
String input:
@ -5279,17 +5278,17 @@ class RunnableBindingBase(RunnableSerializable[Input, Output]):
)
"""The config factories to bind to the underlying Runnable."""
# Union[Type[Input], BaseModel] + things like List[str]
# Union[Type[Input], BaseModel] + things like list[str]
custom_input_type: Optional[Any] = None
"""Override the input type of the underlying Runnable with a custom type.
The type can be a pydantic model, or a type annotation (e.g., `List[str]`).
The type can be a pydantic model, or a type annotation (e.g., `list[str]`).
"""
# Union[Type[Output], BaseModel] + things like List[str]
# Union[Type[Output], BaseModel] + things like list[str]
custom_output_type: Optional[Any] = None
"""Override the output type of the underlying Runnable with a custom type.
The type can be a pydantic model, or a type annotation (e.g., `List[str]`).
The type can be a pydantic model, or a type annotation (e.g., `list[str]`).
"""
model_config = ConfigDict(

View File

@ -243,12 +243,12 @@ def get_config_list(
It is useful for subclasses overriding batch() or abatch().
Args:
config (Optional[Union[RunnableConfig, List[RunnableConfig]]]):
config (Optional[Union[RunnableConfig, list[RunnableConfig]]]):
The config or list of configs.
length (int): The length of the list.
Returns:
List[RunnableConfig]: The list of configs.
list[RunnableConfig]: The list of configs.
Raises:
ValueError: If the length of the list is not equal to the length of the inputs.
@ -302,7 +302,7 @@ def patch_config(
max_concurrency (Optional[int], optional): The max concurrency to set.
Defaults to None.
run_name (Optional[str], optional): The run name to set. Defaults to None.
configurable (Optional[Dict[str, Any]], optional): The configurable to set.
configurable (Optional[dict[str, Any]], optional): The configurable to set.
Defaults to None.
Returns:

View File

@ -127,7 +127,7 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
config: The configuration to use. Defaults to None.
Returns:
Tuple[Runnable[Input, Output], RunnableConfig]: The prepared Runnable and
tuple[Runnable[Input, Output], RunnableConfig]: The prepared Runnable and
configuration.
"""
runnable: Runnable[Input, Output] = self
@ -388,7 +388,7 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
"""Get the configuration specs for the RunnableConfigurableFields.
Returns:
List[ConfigurableFieldSpec]: The configuration specs.
list[ConfigurableFieldSpec]: The configuration specs.
"""
config_specs = []

View File

@ -36,7 +36,7 @@ def draw_mermaid(
Args:
nodes (dict[str, str]): List of node ids.
edges (List[Edge]): List of edges, object with a source,
edges (list[Edge]): List of edges, object with a source,
target and data.
first_node (str, optional): Id of the first node. Defaults to None.
last_node (str, optional): Id of the last node. Defaults to None.

View File

@ -91,7 +91,6 @@ class RunnableWithMessageHistory(RunnableBindingBase):
.. code-block:: python
from operator import itemgetter
from typing import List
from langchain_openai.chat_models import ChatOpenAI
@ -111,9 +110,9 @@ class RunnableWithMessageHistory(RunnableBindingBase):
class InMemoryHistory(BaseChatMessageHistory, BaseModel):
\"\"\"In memory implementation of chat message history.\"\"\"
messages: List[BaseMessage] = Field(default_factory=list)
messages: list[BaseMessage] = Field(default_factory=list)
def add_messages(self, messages: List[BaseMessage]) -> None:
def add_messages(self, messages: list[BaseMessage]) -> None:
\"\"\"Add a list of messages to the store\"\"\"
self.messages.extend(messages)
@ -470,7 +469,7 @@ class RunnableWithMessageHistory(RunnableBindingBase):
return input_val[0]
return list(input_val)
msg = (
f"Expected str, BaseMessage, List[BaseMessage], or Tuple[BaseMessage]. "
f"Expected str, BaseMessage, list[BaseMessage], or tuple[BaseMessage]. "
f"Got {input_val}."
)
raise ValueError(msg) # noqa: TRY004
@ -505,7 +504,7 @@ class RunnableWithMessageHistory(RunnableBindingBase):
if isinstance(output_val, (list, tuple)):
return list(output_val)
msg = (
f"Expected str, BaseMessage, List[BaseMessage], or Tuple[BaseMessage]. "
f"Expected str, BaseMessage, list[BaseMessage], or tuple[BaseMessage]. "
f"Got {output_val}."
)
raise ValueError(msg) # noqa: TRY004

View File

@ -360,7 +360,7 @@ _graph_passthrough: RunnablePassthrough = RunnablePassthrough()
class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
"""Runnable that assigns key-value pairs to Dict[str, Any] inputs.
"""Runnable that assigns key-value pairs to dict[str, Any] inputs.
The `RunnableAssign` class takes input dictionaries and, through a
`RunnableParallel` instance, applies transformations, then combines
@ -371,14 +371,13 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
.. code-block:: python
# This is a RunnableAssign
from typing import Dict
from langchain_core.runnables.passthrough import (
RunnableAssign,
RunnableParallel,
)
from langchain_core.runnables.base import RunnableLambda
def add_ten(x: Dict[str, int]) -> Dict[str, int]:
def add_ten(x: dict[str, int]) -> dict[str, int]:
return {"added": x["input"] + 10}
mapper = RunnableParallel(
@ -676,7 +675,7 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
class RunnablePick(RunnableSerializable[dict[str, Any], dict[str, Any]]):
"""Runnable that picks keys from Dict[str, Any] inputs.
"""Runnable that picks keys from dict[str, Any] inputs.
RunnablePick class represents a Runnable that selectively picks keys from a
dictionary input. It allows you to specify one or more keys to extract

View File

@ -394,7 +394,7 @@ def get_function_first_arg_dict_keys(func: Callable) -> Optional[list[str]]:
func: The function to check.
Returns:
Optional[List[str]]: The keys of the first argument if it is a dict,
Optional[list[str]]: The keys of the first argument if it is a dict,
None otherwise.
"""
try:
@ -438,7 +438,7 @@ def get_function_nonlocals(func: Callable) -> list[Any]:
func: The function to check.
Returns:
List[Any]: The nonlocal variables accessed by the function.
list[Any]: The nonlocal variables accessed by the function.
"""
try:
code = inspect.getsource(func)
@ -683,7 +683,7 @@ def get_unique_config_specs(
specs: The config specs.
Returns:
List[ConfigurableFieldSpec]: The unique config specs.
list[ConfigurableFieldSpec]: The unique config specs.
Raises:
ValueError: If the runnable sequence contains conflicting config specs.

View File

@ -107,14 +107,14 @@ class BaseStore(Generic[K, V], ABC):
"""Set the values for the given keys.
Args:
key_value_pairs (Sequence[Tuple[K, V]]): A sequence of key-value pairs.
key_value_pairs (Sequence[tuple[K, V]]): A sequence of key-value pairs.
"""
async def amset(self, key_value_pairs: Sequence[tuple[K, V]]) -> None:
"""Async set the values for the given keys.
Args:
key_value_pairs (Sequence[Tuple[K, V]]): A sequence of key-value pairs.
key_value_pairs (Sequence[tuple[K, V]]): A sequence of key-value pairs.
"""
return await run_in_executor(None, self.mset, key_value_pairs)
@ -209,7 +209,7 @@ class InMemoryBaseStore(BaseStore[str, V], Generic[V]):
"""Set the values for the given keys.
Args:
key_value_pairs (Sequence[Tuple[str, V]]): A sequence of key-value pairs.
key_value_pairs (Sequence[tuple[str, V]]): A sequence of key-value pairs.
Returns:
None
@ -221,7 +221,7 @@ class InMemoryBaseStore(BaseStore[str, V], Generic[V]):
"""Async set the values for the given keys.
Args:
key_value_pairs (Sequence[Tuple[str, V]]): A sequence of key-value pairs.
key_value_pairs (Sequence[tuple[str, V]]): A sequence of key-value pairs.
Returns:
None
@ -284,7 +284,7 @@ class InMemoryStore(InMemoryBaseStore[Any]):
"""In-memory store for any type of data.
Attributes:
store (Dict[str, Any]): The underlying dictionary that stores
store (dict[str, Any]): The underlying dictionary that stores
the key-value pairs.
Examples:
@ -309,7 +309,7 @@ class InMemoryByteStore(InMemoryBaseStore[bytes]):
"""In-memory store for bytes.
Attributes:
store (Dict[str, bytes]): The underlying dictionary that stores
store (dict[str, bytes]): The underlying dictionary that stores
the key-value pairs.
Examples:

View File

@ -140,7 +140,7 @@ def tool(
return
@tool(response_format="content_and_artifact")
def search_api(query: str) -> Tuple[str, dict]:
def search_api(query: str) -> tuple[str, dict]:
return "partial json of results", {"full": "object of results"}
.. versionadded:: 0.2.14

View File

@ -65,7 +65,7 @@ def tracing_v2_enabled(
Defaults to "default".
example_id (str or UUID, optional): The ID of the example.
Defaults to None.
tags (List[str], optional): The tags to add to the run.
tags (list[str], optional): The tags to add to the run.
Defaults to None.
client (LangSmithClient, optional): The client of the langsmith.
Defaults to None.

View File

@ -46,7 +46,7 @@ class EvaluatorCallbackHandler(BaseTracer):
The sequence of run evaluators to be executed.
executor : ThreadPoolExecutor
The thread pool executor used for running the evaluators.
futures : Set[Future]
futures : set[Future]
The set of futures representing the running evaluators.
skip_unfinished : bool
Whether to skip runs that are not finished or raised

View File

@ -84,7 +84,7 @@ class RunState(TypedDict):
"""Type of the object being run, eg. prompt, chain, llm, etc."""
# Do we want tags/metadata on the root run? Client kinda knows it in most situations
# tags: List[str]
# tags: list[str]
logs: dict[str, LogEntry]
"""Map of run names to sub-runs. If filters were supplied, this list will

View File

@ -621,9 +621,9 @@ def tool_example_to_messages(
Arguments:
input: string, the user input
tool_calls: List[BaseModel], a list of tool calls represented as Pydantic
tool_calls: list[BaseModel], a list of tool calls represented as Pydantic
BaseModels
tool_outputs: Optional[List[str]], a list of tool call outputs.
tool_outputs: Optional[list[str]], a list of tool call outputs.
Does not need to be provided. If not provided, a placeholder value
will be inserted. Defaults to None.
ai_response: Optional[str], if provided, content for a final AIMessage.
@ -635,7 +635,7 @@ def tool_example_to_messages(
.. code-block:: python
from typing import List, Optional
from typing import Optional
from pydantic import BaseModel, Field
from langchain_openai import ChatOpenAI

View File

@ -44,7 +44,7 @@ def find_all_links(
pattern: Regex to use for extracting links from raw HTML.
Returns:
List[str]: all links
list[str]: all links
"""
pattern = pattern or DEFAULT_LINK_REGEX
return list(set(re.findall(pattern, raw_html)))
@ -74,7 +74,7 @@ def extract_sub_links(
exception. Otherwise, raise the exception.
Returns:
List[str]: sub links.
list[str]: sub links.
"""
base_url_to_use = base_url if base_url is not None else url
parsed_base_url = urlparse(base_url_to_use)

View File

@ -48,7 +48,7 @@ def grab_literal(template: str, l_del: str) -> tuple[str, str]:
l_del: The left delimiter.
Returns:
Tuple[str, str]: The literal and the template.
tuple[str, str]: The literal and the template.
"""
global _CURRENT_LINE
@ -122,7 +122,7 @@ def parse_tag(template: str, l_del: str, r_del: str) -> tuple[tuple[str, str], s
r_del: The right delimiter.
Returns:
Tuple[Tuple[str, str], str]: The tag and the template.
tuple[tuple[str, str], str]: The tag and the template.
Raises:
ChevronError: If the tag is unclosed.

View File

@ -190,10 +190,10 @@ def pre_init(func: Callable) -> Any:
Args:
cls (Type[BaseModel]): The model class.
values (Dict[str, Any]): The values to initialize the model with.
values (dict[str, Any]): The values to initialize the model with.
Returns:
Dict[str, Any]: The values to initialize the model with.
dict[str, Any]: The values to initialize the model with.
"""
# Insert default values
fields = cls.model_fields

View File

@ -24,7 +24,7 @@ def xor_args(*arg_groups: tuple[str, ...]) -> Callable:
"""Validate specified keyword args are mutually exclusive.".
Args:
*arg_groups (Tuple[str, ...]): Groups of mutually exclusive keyword args.
*arg_groups (tuple[str, ...]): Groups of mutually exclusive keyword args.
Returns:
Callable: Decorator that validates the specified keyword args
@ -203,7 +203,7 @@ def get_pydantic_field_names(pydantic_cls: Any) -> set[str]:
pydantic_cls: Pydantic class.
Returns:
Set[str]: Field names.
set[str]: Field names.
"""
all_required_field_names = set()
if is_pydantic_v1_subclass(pydantic_cls):
@ -230,7 +230,7 @@ def _build_model_kwargs(
all_required_field_names: All required field names for the pydantic class.
Returns:
Dict[str, Any]: Extra kwargs.
dict[str, Any]: Extra kwargs.
Raises:
ValueError: If a field is specified in both values and extra_kwargs.
@ -278,7 +278,7 @@ def build_extra_kwargs(
all_required_field_names: All required field names for the pydantic class.
Returns:
Dict[str, Any]: Extra kwargs.
dict[str, Any]: Extra kwargs.
Raises:
ValueError: If a field is specified in both values and extra_kwargs.

View File

@ -164,7 +164,7 @@ class InMemoryVectorStore(VectorStore):
embedding: embedding function to use.
"""
# TODO: would be nice to change to
# Dict[str, Document] at some point (will be a breaking change)
# dict[str, Document] at some point (will be a breaking change)
self.store: dict[str, dict[str, Any]] = {}
self.embedding = embedding

View File

@ -869,7 +869,7 @@ def test_get_output_messages_with_value_error() -> None:
with pytest.raises(
ValueError,
match=re.escape(
"Expected str, BaseMessage, List[BaseMessage], or Tuple[BaseMessage]."
"Expected str, BaseMessage, list[BaseMessage], or tuple[BaseMessage]."
f" Got {illegal_bool_message}."
),
):
@ -882,7 +882,7 @@ def test_get_output_messages_with_value_error() -> None:
with pytest.raises(
ValueError,
match=re.escape(
"Expected str, BaseMessage, List[BaseMessage], or Tuple[BaseMessage]."
"Expected str, BaseMessage, list[BaseMessage], or tuple[BaseMessage]."
f" Got {illegal_int_message}."
),
):

View File

@ -305,7 +305,7 @@ def test_schemas(snapshot: SnapshotAssertion) -> None:
"type": "integer",
}
fake_ret = FakeRetriever() # str -> List[Document]
fake_ret = FakeRetriever() # str -> list[Document]
assert fake_ret.get_input_jsonschema() == {
"title": "FakeRetrieverInput",
@ -354,7 +354,7 @@ def test_schemas(snapshot: SnapshotAssertion) -> None:
"type": "array",
}
fake_llm = FakeListLLM(responses=["a"]) # str -> List[List[str]]
fake_llm = FakeListLLM(responses=["a"]) # str -> list[list[str]]
assert _schema(fake_llm.input_schema) == snapshot(name="fake_llm_input_schema")
assert _schema(fake_llm.output_schema) == {
@ -362,7 +362,7 @@ def test_schemas(snapshot: SnapshotAssertion) -> None:
"type": "string",
}
fake_chat = FakeListChatModel(responses=["a"]) # str -> List[List[str]]
fake_chat = FakeListChatModel(responses=["a"]) # str -> list[list[str]]
assert _schema(fake_chat.input_schema) == snapshot(name="fake_chat_input_schema")
assert _schema(fake_chat.output_schema) == snapshot(name="fake_chat_output_schema")
@ -508,9 +508,9 @@ def test_schemas(snapshot: SnapshotAssertion) -> None:
def test_passthrough_assign_schema() -> None:
retriever = FakeRetriever() # str -> List[Document]
retriever = FakeRetriever() # str -> list[Document]
prompt = PromptTemplate.from_template("{context} {question}")
fake_llm = FakeListLLM(responses=["a"]) # str -> List[List[str]]
fake_llm = FakeListLLM(responses=["a"]) # str -> list[list[str]]
seq_w_assign: Runnable = (
RunnablePassthrough.assign(context=itemgetter("question") | retriever)
@ -652,7 +652,7 @@ def test_lambda_schemas(snapshot: SnapshotAssertion) -> None:
def test_with_types_with_type_generics() -> None:
"""Verify that with_types works if we use things like List[int]."""
"""Verify that with_types works if we use things like list[int]."""
def foo(x: int) -> None:
"""Add one to the input."""
@ -746,7 +746,7 @@ def test_schema_complex_seq() -> None:
def test_configurable_fields(snapshot: SnapshotAssertion) -> None:
fake_llm = FakeListLLM(responses=["a"]) # str -> List[List[str]]
fake_llm = FakeListLLM(responses=["a"]) # str -> list[list[str]]
assert fake_llm.invoke("...") == "a"

View File

@ -1902,7 +1902,7 @@ async def test_runnable_with_message_history() -> None:
# where it re-instantiates a list, so mutating the list doesn't end up mutating
# the content in the store!
# Using Any type here rather than List[BaseMessage] due to pydantic issue!
# Using Any type here rather than list[BaseMessage] due to pydantic issue!
messages: Any
def add_message(self, message: BaseMessage) -> None:

View File

@ -1854,7 +1854,7 @@ async def test_runnable_with_message_history() -> None:
# where it re-instantiates a list, so mutating the list doesn't end up mutating
# the content in the store!
# Using Any type here rather than List[BaseMessage] due to pydantic issue!
# Using Any type here rather than list[BaseMessage] due to pydantic issue!
messages: Any
def add_message(self, message: BaseMessage) -> None:

View File

@ -460,7 +460,7 @@ def test_structured_tool_from_function_docstring_complex_args() -> None:
Args:
bar: int
baz: List[str]
baz: list[str]
"""
raise NotImplementedError
@ -2581,8 +2581,6 @@ def test_title_property_preserved() -> None:
https://github.com/langchain-ai/langchain/issues/30456
"""
from typing import Any
from langchain_core.tools import tool
schema_to_be_extracted = {