From 42944f34997a68cb2ae09cd5589d68bf32aaf0aa Mon Sep 17 00:00:00 2001
From: Christophe Bornet
Date: Fri, 11 Apr 2025 22:35:13 +0200
Subject: [PATCH] core: Improve mypy config (#30737)

* Clean up mypy config
* Add mypy `strict` rules except `disallow_any_generics`, `warn_return_any`
  and `strict_equality` (TODO)
* Add mypy `strict_bytes` rule
* Add mypy support for the PEP 702 `@deprecated` decorator
* Bump mypy version to 1.15

---------

Co-authored-by: Eugene Yurtsev
---
 .../langchain_core/_api/beta_decorator.py | 6 +-
 libs/core/langchain_core/_api/deprecation.py | 11 +--
 libs/core/langchain_core/callbacks/manager.py | 2 +-
 .../langchain_core/document_loaders/base.py | 2 +-
 libs/core/langchain_core/embeddings/fake.py | 4 +-
 libs/core/langchain_core/globals.py | 12 +--
 libs/core/langchain_core/indexing/api.py | 2 +-
 .../langchain_core/language_models/base.py | 2 +-
 .../language_models/chat_models.py | 4 +-
 .../langchain_core/language_models/llms.py | 10 +--
 libs/core/langchain_core/messages/base.py | 6 +-
 libs/core/langchain_core/messages/utils.py | 4 +-
 .../langchain_core/output_parsers/json.py | 2 +-
 .../output_parsers/openai_functions.py | 2 +-
 .../core/langchain_core/outputs/llm_result.py | 2 +-
 libs/core/langchain_core/prompts/chat.py | 12 +--
 libs/core/langchain_core/prompts/image.py | 2 +-
 libs/core/langchain_core/prompts/prompt.py | 2 +-
 .../langchain_core/pydantic_v1/__init__.py | 2 +-
 .../langchain_core/pydantic_v1/dataclasses.py | 2 +-
 libs/core/langchain_core/pydantic_v1/main.py | 2 +-
 libs/core/langchain_core/retrievers.py | 10 +--
 libs/core/langchain_core/runnables/base.py | 40 +++++----
 libs/core/langchain_core/runnables/branch.py | 2 +-
 .../langchain_core/runnables/fallbacks.py | 19 +++--
 libs/core/langchain_core/runnables/graph.py | 2 +-
 .../langchain_core/runnables/graph_ascii.py | 6 +-
 .../langchain_core/runnables/graph_mermaid.py | 4 +-
 .../langchain_core/runnables/graph_png.py | 2 +-
 libs/core/langchain_core/runnables/history.py | 2 +-
 .../langchain_core/runnables/passthrough.py | 4 +-
 libs/core/langchain_core/runnables/retry.py | 3 +-
 libs/core/langchain_core/runnables/router.py | 4 +-
 libs/core/langchain_core/runnables/utils.py | 2 +-
 libs/core/langchain_core/tools/simple.py | 4 +-
 .../core/langchain_core/tracers/evaluation.py | 2 +-
 .../langchain_core/tracers/event_stream.py | 2 -
 .../core/langchain_core/tracers/log_stream.py | 4 +-
 .../langchain_core/utils/function_calling.py | 2 +-
 libs/core/langchain_core/utils/pydantic.py | 6 +-
 libs/core/pyproject.toml | 18 +++--
 .../tests/benchmarks/test_async_callbacks.py | 2 +-
 libs/core/tests/benchmarks/test_imports.py | 2 +-
 .../callbacks/test_sync_callback_manager.py | 3 +-
 .../example_selectors/test_similarity.py | 10 +--
 .../output_parsers/test_list_parser.py | 10 +--
 .../output_parsers/test_openai_tools.py | 4 +-
 .../tests/unit_tests/prompts/test_chat.py | 30 +++----
 .../tests/unit_tests/prompts/test_prompt.py | 2 +-
 .../unit_tests/runnables/test_fallbacks.py | 2 +-
 .../tests/unit_tests/runnables/test_graph.py | 5 +-
 .../unit_tests/runnables/test_imports.py | 4 +-
 .../unit_tests/runnables/test_runnable.py | 8 +-
 .../runnables/test_runnable_events_v2.py | 4 +-
 .../runnables/test_tracing_interops.py | 6 +-
 libs/core/tests/unit_tests/test_messages.py | 4 +-
 libs/core/tests/unit_tests/test_tools.py | 8 +-
 .../unit_tests/utils/test_function_calling.py | 40 ++++-----
 .../tests/unit_tests/utils/test_pydantic.py | 3 +-
 libs/core/uv.lock | 81 ++++++++-----------
 60 files changed, 222 insertions(+), 237 deletions(-)

diff --git
a/libs/core/langchain_core/_api/beta_decorator.py b/libs/core/langchain_core/_api/beta_decorator.py index 0eab1d439eb..8fe8d2b81b8 100644 --- a/libs/core/langchain_core/_api/beta_decorator.py +++ b/libs/core/langchain_core/_api/beta_decorator.py @@ -225,10 +225,8 @@ def beta( new_doc = f".. beta::\n {details}\n\n{old_doc}\n" if inspect.iscoroutinefunction(obj): - finalized = finalize(awarning_emitting_wrapper, new_doc) - else: - finalized = finalize(warning_emitting_wrapper, new_doc) - return cast("T", finalized) + return finalize(awarning_emitting_wrapper, new_doc) + return finalize(warning_emitting_wrapper, new_doc) return beta diff --git a/libs/core/langchain_core/_api/deprecation.py b/libs/core/langchain_core/_api/deprecation.py index 7964471c220..fcbfc58b38f 100644 --- a/libs/core/langchain_core/_api/deprecation.py +++ b/libs/core/langchain_core/_api/deprecation.py @@ -152,7 +152,10 @@ def deprecated( _package: str = package, ) -> T: """Implementation of the decorator returned by `deprecated`.""" - from langchain_core.utils.pydantic import FieldInfoV1, FieldInfoV2 + from langchain_core.utils.pydantic import ( # type: ignore[attr-defined] + FieldInfoV1, + FieldInfoV2, + ) def emit_warning() -> None: """Emit the warning.""" @@ -395,10 +398,8 @@ def deprecated( """ if inspect.iscoroutinefunction(obj): - finalized = finalize(awarning_emitting_wrapper, new_doc) - else: - finalized = finalize(warning_emitting_wrapper, new_doc) - return cast("T", finalized) + return finalize(awarning_emitting_wrapper, new_doc) + return finalize(warning_emitting_wrapper, new_doc) return deprecate diff --git a/libs/core/langchain_core/callbacks/manager.py b/libs/core/langchain_core/callbacks/manager.py index 8c721252485..1220ed8103b 100644 --- a/libs/core/langchain_core/callbacks/manager.py +++ b/libs/core/langchain_core/callbacks/manager.py @@ -2408,7 +2408,7 @@ def _configure( run_tree.trace_id, run_tree.dotted_order, ) - handler.run_map[str(run_tree.id)] = cast("Run", run_tree) + handler.run_map[str(run_tree.id)] = run_tree for var, inheritable, handler_class, env_var in _configure_hooks: create_one = ( env_var is not None diff --git a/libs/core/langchain_core/document_loaders/base.py b/libs/core/langchain_core/document_loaders/base.py index d889fbec8f9..f76edc873fe 100644 --- a/libs/core/langchain_core/document_loaders/base.py +++ b/libs/core/langchain_core/document_loaders/base.py @@ -80,7 +80,7 @@ class BaseLoader(ABC): # noqa: B024 iterator = await run_in_executor(None, self.lazy_load) done = object() while True: - doc = await run_in_executor(None, next, iterator, done) # type: ignore[call-arg, arg-type] + doc = await run_in_executor(None, next, iterator, done) if doc is done: break yield doc # type: ignore[misc] diff --git a/libs/core/langchain_core/embeddings/fake.py b/libs/core/langchain_core/embeddings/fake.py index d11ea9bd30d..d788416d9c2 100644 --- a/libs/core/langchain_core/embeddings/fake.py +++ b/libs/core/langchain_core/embeddings/fake.py @@ -52,7 +52,7 @@ class FakeEmbeddings(Embeddings, BaseModel): """The size of the embedding vector.""" def _get_embedding(self) -> list[float]: - import numpy as np # type: ignore[import-not-found, import-untyped] + import numpy as np return list(np.random.default_rng().normal(size=self.size)) @@ -109,7 +109,7 @@ class DeterministicFakeEmbedding(Embeddings, BaseModel): """The size of the embedding vector.""" def _get_embedding(self, seed: int) -> list[float]: - import numpy as np # type: ignore[import-not-found, import-untyped] + import numpy as np # set the 
seed for the random generator rng = np.random.default_rng(seed) diff --git a/libs/core/langchain_core/globals.py b/libs/core/langchain_core/globals.py index c30cdf88048..77375a2d076 100644 --- a/libs/core/langchain_core/globals.py +++ b/libs/core/langchain_core/globals.py @@ -23,7 +23,7 @@ def set_verbose(value: bool) -> None: # noqa: FBT001 value: The new value for the `verbose` global setting. """ try: - import langchain # type: ignore[import] + import langchain # type: ignore[import-not-found] # We're about to run some deprecated code, don't report warnings from it. # The user called the correct (non-deprecated) code path and shouldn't get @@ -57,7 +57,7 @@ def get_verbose() -> bool: The value of the `verbose` global setting. """ try: - import langchain # type: ignore[import] + import langchain # We're about to run some deprecated code, don't report warnings from it. # The user called the correct (non-deprecated) code path and shouldn't get @@ -96,7 +96,7 @@ def set_debug(value: bool) -> None: # noqa: FBT001 value: The new value for the `debug` global setting. """ try: - import langchain # type: ignore[import] + import langchain # We're about to run some deprecated code, don't report warnings from it. # The user called the correct (non-deprecated) code path and shouldn't get @@ -128,7 +128,7 @@ def get_debug() -> bool: The value of the `debug` global setting. """ try: - import langchain # type: ignore[import] + import langchain # We're about to run some deprecated code, don't report warnings from it. # The user called the correct (non-deprecated) code path and shouldn't get @@ -164,7 +164,7 @@ def set_llm_cache(value: Optional["BaseCache"]) -> None: value: The new LLM cache to use. If `None`, the LLM cache is disabled. """ try: - import langchain # type: ignore[import] + import langchain # We're about to run some deprecated code, don't report warnings from it. # The user called the correct (non-deprecated) code path and shouldn't get @@ -198,7 +198,7 @@ def get_llm_cache() -> "BaseCache": The value of the `llm_cache` global setting. """ try: - import langchain # type: ignore[import] + import langchain # We're about to run some deprecated code, don't report warnings from it. # The user called the correct (non-deprecated) code path and shouldn't get diff --git a/libs/core/langchain_core/indexing/api.py b/libs/core/langchain_core/indexing/api.py index df7dfa209e7..18907182b39 100644 --- a/libs/core/langchain_core/indexing/api.py +++ b/libs/core/langchain_core/indexing/api.py @@ -394,7 +394,7 @@ def index( if cleanup == "scoped_full": scoped_full_cleanup_source_ids.add(source_id) # source ids cannot be None after for loop above. - source_ids = cast("Sequence[str]", source_ids) # type: ignore[assignment] + source_ids = cast("Sequence[str]", source_ids) exists_batch = record_manager.exists([doc.uid for doc in hashed_docs]) diff --git a/libs/core/langchain_core/language_models/base.py b/libs/core/langchain_core/language_models/base.py index f86c93882e1..a4d3442d8cc 100644 --- a/libs/core/langchain_core/language_models/base.py +++ b/libs/core/langchain_core/language_models/base.py @@ -61,7 +61,7 @@ def get_tokenizer() -> Any: every time it is called. """ try: - from transformers import GPT2TokenizerFast # type: ignore[import] + from transformers import GPT2TokenizerFast # type: ignore[import-not-found] except ImportError as e: msg = ( "Could not import transformers python package. 
" diff --git a/libs/core/langchain_core/language_models/chat_models.py b/libs/core/langchain_core/language_models/chat_models.py index 0bf400361cc..a62e06f460a 100644 --- a/libs/core/langchain_core/language_models/chat_models.py +++ b/libs/core/langchain_core/language_models/chat_models.py @@ -853,7 +853,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC): run_manager.on_llm_end( LLMResult( generations=[res.generations], # type: ignore[list-item, union-attr] - llm_output=res.llm_output, # type: ignore[list-item, union-attr] + llm_output=res.llm_output, # type: ignore[union-attr] ) ) for run_manager, res in zip(run_managers, results) @@ -1107,7 +1107,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC): None, next, iterator, - done, # type: ignore[call-arg, arg-type] + done, ) if item is done: break diff --git a/libs/core/langchain_core/language_models/llms.py b/libs/core/langchain_core/language_models/llms.py index 40f05dbf961..87f3974ae1c 100644 --- a/libs/core/langchain_core/language_models/llms.py +++ b/libs/core/langchain_core/language_models/llms.py @@ -455,7 +455,7 @@ class BaseLLM(BaseLanguageModel[str], ABC): inputs[i : i + max_concurrency] for i in range(0, len(inputs), max_concurrency) ] - config = [{**c, "max_concurrency": None} for c in config] # type: ignore[misc] + config = [{**c, "max_concurrency": None} for c in config] return [ output for i, batch in enumerate(batches) @@ -501,7 +501,7 @@ class BaseLLM(BaseLanguageModel[str], ABC): inputs[i : i + max_concurrency] for i in range(0, len(inputs), max_concurrency) ] - config = [{**c, "max_concurrency": None} for c in config] # type: ignore[misc] + config = [{**c, "max_concurrency": None} for c in config] return [ output for i, batch in enumerate(batches) @@ -746,7 +746,7 @@ class BaseLLM(BaseLanguageModel[str], ABC): None, next, iterator, - done, # type: ignore[call-arg, arg-type] + done, ) if item is done: break @@ -1231,7 +1231,7 @@ class BaseLLM(BaseLanguageModel[str], ABC): stop, run_managers, # type: ignore[arg-type] new_arg_supported=bool(new_arg_supported), - **kwargs, # type: ignore[arg-type] + **kwargs, ) if len(missing_prompts) > 0: run_managers = await asyncio.gather( @@ -1253,7 +1253,7 @@ class BaseLLM(BaseLanguageModel[str], ABC): stop, run_managers, # type: ignore[arg-type] new_arg_supported=bool(new_arg_supported), - **kwargs, # type: ignore[arg-type] + **kwargs, ) llm_output = await aupdate_cache( self.cache, diff --git a/libs/core/langchain_core/messages/base.py b/libs/core/langchain_core/messages/base.py index f68c49739da..58049d28e9e 100644 --- a/libs/core/langchain_core/messages/base.py +++ b/libs/core/langchain_core/messages/base.py @@ -119,7 +119,7 @@ class BaseMessage(Serializable): """Concatenate this message with another message.""" from langchain_core.prompts.chat import ChatPromptTemplate - prompt = ChatPromptTemplate(messages=[self]) # type: ignore[call-arg] + prompt = ChatPromptTemplate(messages=[self]) return prompt + other def pretty_repr( @@ -165,7 +165,7 @@ def merge_content( if isinstance(merged, str): # If the next chunk is also a string, then merge them naively if isinstance(content, str): - merged = cast("str", merged) + content + merged += content # If the next chunk is a list, add the current to the start of the list else: merged = [merged] + content # type: ignore[assignment,operator] @@ -215,7 +215,7 @@ class BaseMessageChunk(BaseMessage): # If both are (subclasses of) BaseMessageChunk, # concat into a single BaseMessageChunk - return self.__class__( # type: 
ignore[call-arg] + return self.__class__( id=self.id, type=self.type, content=merge_content(self.content, other.content), diff --git a/libs/core/langchain_core/messages/utils.py b/libs/core/langchain_core/messages/utils.py index b16a7521d37..92a658afd23 100644 --- a/libs/core/langchain_core/messages/utils.py +++ b/libs/core/langchain_core/messages/utils.py @@ -237,7 +237,7 @@ def _create_message_from_message_type( if additional_kwargs: if response_metadata := additional_kwargs.pop("response_metadata", None): kwargs["response_metadata"] = response_metadata - kwargs["additional_kwargs"] = additional_kwargs # type: ignore[assignment] + kwargs["additional_kwargs"] = additional_kwargs additional_kwargs.update(additional_kwargs.pop("additional_kwargs", {})) if id is not None: kwargs["id"] = id @@ -889,7 +889,7 @@ def trim_messages( return sum(token_counter(msg) for msg in messages) # type: ignore[arg-type, misc] else: - list_token_counter = token_counter # type: ignore[assignment] + list_token_counter = token_counter else: msg = ( f"'token_counter' expected to be a model that implements " diff --git a/libs/core/langchain_core/output_parsers/json.py b/libs/core/langchain_core/output_parsers/json.py index 70fda7aac89..84f510f331d 100644 --- a/libs/core/langchain_core/output_parsers/json.py +++ b/libs/core/langchain_core/output_parsers/json.py @@ -6,7 +6,7 @@ import json from json import JSONDecodeError from typing import Annotated, Any, Optional, TypeVar, Union -import jsonpatch # type: ignore[import] +import jsonpatch # type: ignore[import-untyped] import pydantic from pydantic import SkipValidation diff --git a/libs/core/langchain_core/output_parsers/openai_functions.py b/libs/core/langchain_core/output_parsers/openai_functions.py index 1135ccf0df6..f71b2c8044b 100644 --- a/libs/core/langchain_core/output_parsers/openai_functions.py +++ b/libs/core/langchain_core/output_parsers/openai_functions.py @@ -5,7 +5,7 @@ import json from types import GenericAlias from typing import Any, Optional, Union -import jsonpatch # type: ignore[import] +import jsonpatch # type: ignore[import-untyped] from pydantic import BaseModel, model_validator from typing_extensions import override diff --git a/libs/core/langchain_core/outputs/llm_result.py b/libs/core/langchain_core/outputs/llm_result.py index 163b4f96cc0..fb72f6404db 100644 --- a/libs/core/langchain_core/outputs/llm_result.py +++ b/libs/core/langchain_core/outputs/llm_result.py @@ -50,7 +50,7 @@ class LLMResult(BaseModel): run: Optional[list[RunInfo]] = None """List of metadata info for model call for each input.""" - type: Literal["LLMResult"] = "LLMResult" # type: ignore[assignment] + type: Literal["LLMResult"] = "LLMResult" """Type is used exclusively for serialization purposes.""" def flatten(self) -> list[LLMResult]: diff --git a/libs/core/langchain_core/prompts/chat.py b/libs/core/langchain_core/prompts/chat.py index 720d7db6942..10f24937a9d 100644 --- a/libs/core/langchain_core/prompts/chat.py +++ b/libs/core/langchain_core/prompts/chat.py @@ -129,7 +129,7 @@ class BaseMessagePromptTemplate(Serializable, ABC): Returns: Combined prompt template. 
""" - prompt = ChatPromptTemplate(messages=[self]) # type: ignore[call-arg] + prompt = ChatPromptTemplate(messages=[self]) return prompt + other @@ -1033,23 +1033,23 @@ class ChatPromptTemplate(BaseChatPromptTemplate): if isinstance(other, ChatPromptTemplate): return ChatPromptTemplate(messages=self.messages + other.messages).partial( **partials - ) # type: ignore[call-arg] + ) if isinstance( other, (BaseMessagePromptTemplate, BaseMessage, BaseChatPromptTemplate) ): return ChatPromptTemplate(messages=self.messages + [other]).partial( **partials - ) # type: ignore[call-arg] + ) if isinstance(other, (list, tuple)): _other = ChatPromptTemplate.from_messages(other) return ChatPromptTemplate(messages=self.messages + _other.messages).partial( **partials - ) # type: ignore[call-arg] + ) if isinstance(other, str): prompt = HumanMessagePromptTemplate.from_template(other) return ChatPromptTemplate(messages=self.messages + [prompt]).partial( **partials - ) # type: ignore[call-arg] + ) msg = f"Unsupported operand type for +: {type(other)}" raise NotImplementedError(msg) @@ -1138,7 +1138,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate): Returns: a chat prompt template. """ - return cls( # type: ignore[call-arg] + return cls( messages=[ ChatMessagePromptTemplate.from_template(template, role=role) for role, template in string_messages diff --git a/libs/core/langchain_core/prompts/image.py b/libs/core/langchain_core/prompts/image.py index a648c6afc92..525d2941a92 100644 --- a/libs/core/langchain_core/prompts/image.py +++ b/libs/core/langchain_core/prompts/image.py @@ -116,7 +116,7 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]): output: ImageURL = {"url": url} if detail: # Don't check literal values here: let the API check them - output["detail"] = detail # type: ignore[typeddict-item] + output["detail"] = detail return output async def aformat(self, **kwargs: Any) -> ImageURL: diff --git a/libs/core/langchain_core/prompts/prompt.py b/libs/core/langchain_core/prompts/prompt.py index a9fdee95c40..190b0aab3ab 100644 --- a/libs/core/langchain_core/prompts/prompt.py +++ b/libs/core/langchain_core/prompts/prompt.py @@ -297,7 +297,7 @@ class PromptTemplate(StringPromptTemplate): return cls( input_variables=input_variables, template=template, - template_format=template_format, # type: ignore[arg-type] + template_format=template_format, partial_variables=_partial_variables, **kwargs, ) diff --git a/libs/core/langchain_core/pydantic_v1/__init__.py b/libs/core/langchain_core/pydantic_v1/__init__.py index c11a835dae7..e4dfbbaea19 100644 --- a/libs/core/langchain_core/pydantic_v1/__init__.py +++ b/libs/core/langchain_core/pydantic_v1/__init__.py @@ -18,7 +18,7 @@ from langchain_core._api.deprecation import warn_deprecated try: from pydantic.v1 import * # noqa: F403 except ImportError: - from pydantic import * # noqa: F403 + from pydantic import * # type: ignore[assignment,no-redef] # noqa: F403 try: diff --git a/libs/core/langchain_core/pydantic_v1/dataclasses.py b/libs/core/langchain_core/pydantic_v1/dataclasses.py index 7d795991f18..812660578bd 100644 --- a/libs/core/langchain_core/pydantic_v1/dataclasses.py +++ b/libs/core/langchain_core/pydantic_v1/dataclasses.py @@ -5,7 +5,7 @@ from langchain_core._api import warn_deprecated try: from pydantic.v1.dataclasses import * # noqa: F403 except ImportError: - from pydantic.dataclasses import * # noqa: F403 + from pydantic.dataclasses import * # type: ignore[no-redef] # noqa: F403 warn_deprecated( "0.3.0", diff --git 
a/libs/core/langchain_core/pydantic_v1/main.py b/libs/core/langchain_core/pydantic_v1/main.py index 7f89181bf1b..88492823173 100644 --- a/libs/core/langchain_core/pydantic_v1/main.py +++ b/libs/core/langchain_core/pydantic_v1/main.py @@ -5,7 +5,7 @@ from langchain_core._api import warn_deprecated try: from pydantic.v1.main import * # noqa: F403 except ImportError: - from pydantic.main import * # noqa: F403 + from pydantic.main import * # type: ignore[assignment,no-redef] # noqa: F403 warn_deprecated( "0.3.0", diff --git a/libs/core/langchain_core/retrievers.py b/libs/core/langchain_core/retrievers.py index 61644d830eb..3456cc3e02c 100644 --- a/libs/core/langchain_core/retrievers.py +++ b/libs/core/langchain_core/retrievers.py @@ -30,6 +30,7 @@ from pydantic import ConfigDict from typing_extensions import Self, TypedDict, override from langchain_core._api import deprecated +from langchain_core.callbacks import Callbacks from langchain_core.documents import Document from langchain_core.runnables import ( Runnable, @@ -43,7 +44,6 @@ if TYPE_CHECKING: from langchain_core.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, CallbackManagerForRetrieverRun, - Callbacks, ) RetrieverInput = str @@ -160,10 +160,10 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC): stacklevel=4, ) swap = cls.get_relevant_documents - cls.get_relevant_documents = ( # type: ignore[assignment] + cls.get_relevant_documents = ( # type: ignore[method-assign] BaseRetriever.get_relevant_documents ) - cls._get_relevant_documents = swap # type: ignore[assignment] + cls._get_relevant_documents = swap # type: ignore[method-assign] if ( hasattr(cls, "aget_relevant_documents") and cls.aget_relevant_documents != BaseRetriever.aget_relevant_documents @@ -175,10 +175,10 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC): stacklevel=4, ) aswap = cls.aget_relevant_documents - cls.aget_relevant_documents = ( # type: ignore[assignment] + cls.aget_relevant_documents = ( # type: ignore[method-assign] BaseRetriever.aget_relevant_documents ) - cls._aget_relevant_documents = aswap # type: ignore[assignment] + cls._aget_relevant_documents = aswap # type: ignore[method-assign] parameters = signature(cls._get_relevant_documents).parameters cls._new_arg_supported = parameters.get("run_manager") is not None if ( diff --git a/libs/core/langchain_core/runnables/base.py b/libs/core/langchain_core/runnables/base.py index 05bfeb355d7..272e9a4eb47 100644 --- a/libs/core/langchain_core/runnables/base.py +++ b/libs/core/langchain_core/runnables/base.py @@ -489,7 +489,7 @@ class Runnable(Generic[Input, Output], ABC): include = include or [] config_specs = self.config_specs configurable = ( - create_model_v2( # type: ignore[call-overload] + create_model_v2( "Configurable", field_definitions={ spec.id: ( @@ -514,9 +514,7 @@ class Runnable(Generic[Input, Output], ABC): if field_name in [i for i in include if i != "configurable"] }, } - return create_model_v2( # type: ignore[call-overload] - self.get_name("Config"), field_definitions=all_fields - ) + return create_model_v2(self.get_name("Config"), field_definitions=all_fields) def get_config_jsonschema( self, *, include: Optional[Sequence[str]] = None @@ -1543,7 +1541,7 @@ class Runnable(Generic[Input, Output], ABC): config=cast( "RunnableConfig", {**(config or {}), **kwargs}, - ), # type: ignore[misc] + ), kwargs={}, ) @@ -1931,8 +1929,8 @@ class Runnable(Generic[Input, Output], ABC): "Output", context.run( 
call_func_with_variable_args, # type: ignore[arg-type] - func, # type: ignore[arg-type] - input, # type: ignore[arg-type] + func, + input, config, run_manager, **kwargs, @@ -2194,7 +2192,7 @@ class Runnable(Generic[Input, Output], ABC): cast("_StreamingCallbackHandler", h) for h in run_manager.handlers # instance check OK here, it's a mixin - if isinstance(h, _StreamingCallbackHandler) # type: ignore[misc] + if isinstance(h, _StreamingCallbackHandler) ), None, ): @@ -2299,7 +2297,7 @@ class Runnable(Generic[Input, Output], ABC): cast("_StreamingCallbackHandler", h) for h in run_manager.handlers # instance check OK here, it's a mixin - if isinstance(h, _StreamingCallbackHandler) # type: ignore[misc] + if isinstance(h, _StreamingCallbackHandler) ), None, ): @@ -2318,7 +2316,7 @@ class Runnable(Generic[Input, Output], ABC): final_output = chunk else: try: - final_output = final_output + chunk # type: ignore[operator] + final_output = final_output + chunk except TypeError: final_output = chunk final_output_supported = False @@ -2601,7 +2599,7 @@ def _seq_input_schema( next_input_schema = _seq_input_schema(steps[1:], config) if not issubclass(next_input_schema, RootModel): # it's a dict as expected - return create_model_v2( # type: ignore[call-overload] + return create_model_v2( "RunnableSequenceInput", field_definitions={ k: (v.annotation, v.default) @@ -2628,7 +2626,7 @@ def _seq_output_schema( prev_output_schema = _seq_output_schema(steps[:-1], config) if not issubclass(prev_output_schema, RootModel): # it's a dict as expected - return create_model_v2( # type: ignore[call-overload] + return create_model_v2( "RunnableSequenceOutput", field_definitions={ **{ @@ -2646,7 +2644,7 @@ def _seq_output_schema( if not issubclass(prev_output_schema, RootModel): # it's a dict as expected if isinstance(last.keys, list): - return create_model_v2( # type: ignore[call-overload] + return create_model_v2( "RunnableSequenceOutput", field_definitions={ k: (v.annotation, v.default) @@ -2655,7 +2653,7 @@ def _seq_output_schema( }, ) field = prev_output_schema.model_fields[last.keys] - return create_model_v2( # type: ignore[call-overload] + return create_model_v2( "RunnableSequenceOutput", root=(field.annotation, field.default) ) @@ -3625,7 +3623,7 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]): for s in self.steps__.values() ): # This is correct, but pydantic typings/mypy don't think so. 
- return create_model_v2( # type: ignore[call-overload] + return create_model_v2( self.get_name("Input"), field_definitions={ k: (v.annotation, v.default) @@ -4084,7 +4082,7 @@ class RunnableGenerator(Runnable[Input, Output]): func_for_name: Callable = atransform if is_async_generator(transform): - self._atransform = transform # type: ignore[assignment] + self._atransform = transform func_for_name = transform elif inspect.isgeneratorfunction(transform): self._transform = transform @@ -4211,7 +4209,7 @@ class RunnableGenerator(Runnable[Input, Output]): input, self._transform, # type: ignore[arg-type] config, - **kwargs, # type: ignore[arg-type] + **kwargs, ) @override @@ -4815,7 +4813,7 @@ class RunnableLambda(Runnable[Input, Output]): if inspect.isgeneratorfunction(self.func): output: Optional[Output] = None for chunk in call_func_with_variable_args( - self.func, cast("Input", final), config, run_manager, **kwargs + self.func, final, config, run_manager, **kwargs ): yield chunk if output is None: @@ -4827,7 +4825,7 @@ class RunnableLambda(Runnable[Input, Output]): output = chunk else: output = call_func_with_variable_args( - self.func, cast("Input", final), config, run_manager, **kwargs + self.func, final, config, run_manager, **kwargs ) # If the output is a Runnable, use its stream output @@ -4936,7 +4934,7 @@ class RunnableLambda(Runnable[Input, Output]): "AsyncIterator[Output]", acall_func_with_variable_args( cast("Callable", afunc), - cast("Input", final), + final, config, run_manager, **kwargs, @@ -4953,7 +4951,7 @@ class RunnableLambda(Runnable[Input, Output]): else: output = await acall_func_with_variable_args( cast("Callable", afunc), - cast("Input", final), + final, config, run_manager, **kwargs, diff --git a/libs/core/langchain_core/runnables/branch.py b/libs/core/langchain_core/runnables/branch.py index fad0480aef6..686426e9e79 100644 --- a/libs/core/langchain_core/runnables/branch.py +++ b/libs/core/langchain_core/runnables/branch.py @@ -114,7 +114,7 @@ class RunnableBranch(RunnableSerializable[Input, Output]): _branches = [] for branch in branches[:-1]: - if not isinstance(branch, (tuple, list)): # type: ignore[arg-type] + if not isinstance(branch, (tuple, list)): msg = ( f"RunnableBranch branches must be " f"tuples or lists, not {type(branch)}" diff --git a/libs/core/langchain_core/runnables/fallbacks.py b/libs/core/langchain_core/runnables/fallbacks.py index e3ef9c057c2..2150b00beba 100644 --- a/libs/core/langchain_core/runnables/fallbacks.py +++ b/libs/core/langchain_core/runnables/fallbacks.py @@ -10,7 +10,6 @@ from typing import ( Any, Optional, Union, - cast, ) from pydantic import BaseModel, ConfigDict @@ -183,7 +182,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): for runnable in self.runnables: try: if self.exception_key and last_error is not None: - input[self.exception_key] = last_error + input[self.exception_key] = last_error # type: ignore[index] child_config = patch_config(config, callbacks=run_manager.get_child()) with set_config_context(child_config) as context: output = context.run( @@ -237,7 +236,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): for runnable in self.runnables: try: if self.exception_key and last_error is not None: - input[self.exception_key] = last_error + input[self.exception_key] = last_error # type: ignore[index] child_config = patch_config(config, callbacks=run_manager.get_child()) with set_config_context(child_config) as context: coro = context.run(runnable.ainvoke, input, config, **kwargs) @@ 
-328,12 +327,12 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): if not return_exceptions: first_to_raise = first_to_raise or output else: - handled_exceptions[i] = cast("BaseException", output) + handled_exceptions[i] = output run_again.pop(i) elif isinstance(output, self.exceptions_to_handle): if self.exception_key: input[self.exception_key] = output # type: ignore[index] - handled_exceptions[i] = cast("BaseException", output) + handled_exceptions[i] = output else: run_managers[i].on_chain_end(output) to_return[i] = output @@ -425,12 +424,12 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): if not return_exceptions: first_to_raise = first_to_raise or output else: - handled_exceptions[i] = cast("BaseException", output) + handled_exceptions[i] = output run_again.pop(i) elif isinstance(output, self.exceptions_to_handle): if self.exception_key: input[self.exception_key] = output # type: ignore[index] - handled_exceptions[i] = cast("BaseException", output) + handled_exceptions[i] = output else: to_return[i] = output await run_managers[i].on_chain_end(output) @@ -482,7 +481,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): for runnable in self.runnables: try: if self.exception_key and last_error is not None: - input[self.exception_key] = last_error + input[self.exception_key] = last_error # type: ignore[index] child_config = patch_config(config, callbacks=run_manager.get_child()) with set_config_context(child_config) as context: stream = context.run( @@ -546,7 +545,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): for runnable in self.runnables: try: if self.exception_key and last_error is not None: - input[self.exception_key] = last_error + input[self.exception_key] = last_error # type: ignore[index] child_config = patch_config(config, callbacks=run_manager.get_child()) with set_config_context(child_config) as context: stream = runnable.astream( @@ -574,7 +573,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): async for chunk in stream: yield chunk try: - output = output + chunk # type: ignore[operator] + output = output + chunk except TypeError: output = None except BaseException as e: diff --git a/libs/core/langchain_core/runnables/graph.py b/libs/core/langchain_core/runnables/graph.py index 47fb550290b..20997e52126 100644 --- a/libs/core/langchain_core/runnables/graph.py +++ b/libs/core/langchain_core/runnables/graph.py @@ -288,7 +288,7 @@ class Graph: "target": stable_node_ids[edge.target], } if edge.data is not None: - edge_dict["data"] = edge.data + edge_dict["data"] = edge.data # type: ignore[assignment] if edge.conditional: edge_dict["conditional"] = True edges.append(edge_dict) diff --git a/libs/core/langchain_core/runnables/graph_ascii.py b/libs/core/langchain_core/runnables/graph_ascii.py index 723042950c5..9f0b40c3632 100644 --- a/libs/core/langchain_core/runnables/graph_ascii.py +++ b/libs/core/langchain_core/runnables/graph_ascii.py @@ -167,9 +167,9 @@ def _build_sugiyama_layout( vertices: Mapping[str, str], edges: Sequence[LangEdge] ) -> Any: try: - from grandalf.graphs import Edge, Graph, Vertex # type: ignore[import] - from grandalf.layouts import SugiyamaLayout # type: ignore[import] - from grandalf.routing import ( # type: ignore[import] + from grandalf.graphs import Edge, Graph, Vertex # type: ignore[import-untyped] + from grandalf.layouts import SugiyamaLayout # type: ignore[import-untyped] + from grandalf.routing import ( # type: ignore[import-untyped] EdgeViewer, 
route_with_lines, ) diff --git a/libs/core/langchain_core/runnables/graph_mermaid.py b/libs/core/langchain_core/runnables/graph_mermaid.py index 028f2b2736d..c47359e29a0 100644 --- a/libs/core/langchain_core/runnables/graph_mermaid.py +++ b/libs/core/langchain_core/runnables/graph_mermaid.py @@ -305,7 +305,7 @@ async def _render_mermaid_using_pyppeteer( ) -> bytes: """Renders Mermaid graph using Pyppeteer.""" try: - from pyppeteer import launch # type: ignore[import] + from pyppeteer import launch # type: ignore[import-not-found] except ImportError as e: msg = "Install Pyppeteer to use the Pyppeteer method: `pip install pyppeteer`." raise ImportError(msg) from e @@ -377,7 +377,7 @@ def _render_mermaid_using_api( ) -> bytes: """Renders Mermaid graph using the Mermaid.INK API.""" try: - import requests # type: ignore[import] + import requests except ImportError as e: msg = ( "Install the `requests` module to use the Mermaid.INK API: " diff --git a/libs/core/langchain_core/runnables/graph_png.py b/libs/core/langchain_core/runnables/graph_png.py index 2c2663c4e89..504236188ce 100644 --- a/libs/core/langchain_core/runnables/graph_png.py +++ b/libs/core/langchain_core/runnables/graph_png.py @@ -135,7 +135,7 @@ class PngDrawer: :param output_path: The path to save the PNG. If None, PNG bytes are returned. """ try: - import pygraphviz as pgv # type: ignore[import] + import pygraphviz as pgv # type: ignore[import-not-found] except ImportError as exc: msg = "Install pygraphviz to draw graphs: `pip install pygraphviz`." raise ImportError(msg) from exc diff --git a/libs/core/langchain_core/runnables/history.py b/libs/core/langchain_core/runnables/history.py index 3d22cc4d9ff..4c553f0f4a8 100644 --- a/libs/core/langchain_core/runnables/history.py +++ b/libs/core/langchain_core/runnables/history.py @@ -388,7 +388,7 @@ class RunnableWithMessageHistory(RunnableBindingBase): module_name=self.__class__.__module__, root=(Sequence[BaseMessage], ...), ) - return create_model_v2( # type: ignore[call-overload] + return create_model_v2( "RunnableWithChatHistoryInput", field_definitions=fields, module_name=self.__class__.__module__, diff --git a/libs/core/langchain_core/runnables/passthrough.py b/libs/core/langchain_core/runnables/passthrough.py index 39edb6d2410..098ce791af9 100644 --- a/libs/core/langchain_core/runnables/passthrough.py +++ b/libs/core/langchain_core/runnables/passthrough.py @@ -455,9 +455,7 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]): for name, field_info in map_output_schema.model_fields.items(): fields[name] = (field_info.annotation, field_info.default) - return create_model_v2( # type: ignore[call-overload] - "RunnableAssignOutput", field_definitions=fields - ) + return create_model_v2("RunnableAssignOutput", field_definitions=fields) if not issubclass(map_output_schema, RootModel): # ie. only map output is a dict # ie. 
input type is either unknown or inferred incorrectly diff --git a/libs/core/langchain_core/runnables/retry.py b/libs/core/langchain_core/runnables/retry.py index 4ef52f8b736..3ceb396ff28 100644 --- a/libs/core/langchain_core/runnables/retry.py +++ b/libs/core/langchain_core/runnables/retry.py @@ -20,8 +20,9 @@ from tenacity import ( ) from typing_extensions import TypedDict, override -from langchain_core.runnables.base import Input, Output, RunnableBindingBase +from langchain_core.runnables.base import RunnableBindingBase from langchain_core.runnables.config import RunnableConfig, patch_config +from langchain_core.runnables.utils import Input, Output if TYPE_CHECKING: from langchain_core.callbacks.manager import ( diff --git a/libs/core/langchain_core/runnables/router.py b/libs/core/langchain_core/runnables/router.py index 9d57eb76b22..192c6239cef 100644 --- a/libs/core/langchain_core/runnables/router.py +++ b/libs/core/langchain_core/runnables/router.py @@ -17,8 +17,6 @@ from pydantic import ConfigDict from typing_extensions import TypedDict, override from langchain_core.runnables.base import ( - Input, - Output, Runnable, RunnableSerializable, coerce_to_runnable, @@ -30,6 +28,8 @@ from langchain_core.runnables.config import ( ) from langchain_core.runnables.utils import ( ConfigurableFieldSpec, + Input, + Output, gather_with_concurrency, get_unique_config_specs, ) diff --git a/libs/core/langchain_core/runnables/utils.py b/libs/core/langchain_core/runnables/utils.py index 0172c63b80b..c0fdeb96ae0 100644 --- a/libs/core/langchain_core/runnables/utils.py +++ b/libs/core/langchain_core/runnables/utils.py @@ -141,7 +141,7 @@ def coro_with_context( The coroutine with the context. """ if asyncio_accepts_context(): - return asyncio.create_task(coro, context=context) # type: ignore[arg-type,call-arg] + return asyncio.create_task(coro, context=context) # type: ignore[arg-type,call-arg,unused-ignore] if create_task: return asyncio.create_task(coro) # type: ignore[arg-type] return coro diff --git a/libs/core/langchain_core/tools/simple.py b/libs/core/langchain_core/tools/simple.py index e450dafc58c..e0062368f3b 100644 --- a/libs/core/langchain_core/tools/simple.py +++ b/libs/core/langchain_core/tools/simple.py @@ -132,9 +132,7 @@ class Tool(BaseTool): self, name: str, func: Optional[Callable], description: str, **kwargs: Any ) -> None: """Initialize tool.""" - super().__init__( # type: ignore[call-arg] - name=name, func=func, description=description, **kwargs - ) + super().__init__(name=name, func=func, description=description, **kwargs) @classmethod def from_function( diff --git a/libs/core/langchain_core/tracers/evaluation.py b/libs/core/langchain_core/tracers/evaluation.py index 6595aa6141a..c7447ad4d81 100644 --- a/libs/core/langchain_core/tracers/evaluation.py +++ b/libs/core/langchain_core/tracers/evaluation.py @@ -162,7 +162,7 @@ class EvaluatorCallbackHandler(BaseTracer): if isinstance(results, EvaluationResult): results_ = [results] elif isinstance(results, dict) and "results" in results: - results_ = cast("list[EvaluationResult]", results["results"]) + results_ = results["results"] else: msg = ( f"Invalid evaluation result type {type(results)}." 
diff --git a/libs/core/langchain_core/tracers/event_stream.py b/libs/core/langchain_core/tracers/event_stream.py index 54621f96ccc..6cc9a65487c 100644 --- a/libs/core/langchain_core/tracers/event_stream.py +++ b/libs/core/langchain_core/tracers/event_stream.py @@ -787,8 +787,6 @@ async def _astream_events_implementation_v1( root_metadata = config.get("metadata", {}) root_name = config.get("run_name", runnable.get_name()) - # Ignoring mypy complaint about too many different union combinations - # This arises because many of the argument types are unions async for log in _astream_log_implementation( runnable, input, diff --git a/libs/core/langchain_core/tracers/log_stream.py b/libs/core/langchain_core/tracers/log_stream.py index 30d14e93296..311fc8418ca 100644 --- a/libs/core/langchain_core/tracers/log_stream.py +++ b/libs/core/langchain_core/tracers/log_stream.py @@ -17,7 +17,7 @@ from typing import ( overload, ) -import jsonpatch # type: ignore[import] +import jsonpatch # type: ignore[import-untyped] from typing_extensions import NotRequired, TypedDict, override from langchain_core.load import dumps @@ -618,7 +618,7 @@ async def _astream_log_implementation( The implementation has been factored out (at least temporarily) as both astream_log and astream_events relies on it. """ - import jsonpatch # type: ignore[import] + import jsonpatch from langchain_core.callbacks.base import BaseCallbackManager from langchain_core.tracers.log_stream import ( diff --git a/libs/core/langchain_core/utils/function_calling.py b/libs/core/langchain_core/utils/function_calling.py index 49cc37c5598..bfc70df1715 100644 --- a/libs/core/langchain_core/utils/function_calling.py +++ b/libs/core/langchain_core/utils/function_calling.py @@ -313,7 +313,7 @@ def _convert_any_typed_dicts_to_pydantic( subscriptable_origin = _py_38_safe_origin(origin) type_args = tuple( _convert_any_typed_dicts_to_pydantic(arg, depth=depth + 1, visited=visited) - for arg in type_args # type: ignore[index] + for arg in type_args ) return subscriptable_origin[type_args] # type: ignore[index] return type_ diff --git a/libs/core/langchain_core/utils/pydantic.py b/libs/core/langchain_core/utils/pydantic.py index 5b36933dceb..ea987741ce3 100644 --- a/libs/core/langchain_core/utils/pydantic.py +++ b/libs/core/langchain_core/utils/pydantic.py @@ -383,7 +383,7 @@ if IS_PYDANTIC_V2: ) -> Union[dict[str, FieldInfoV2], dict[str, FieldInfoV1]]: """Get the field names of a Pydantic model.""" if hasattr(model, "model_fields"): - return model.model_fields # type: ignore[call-overload,arg-type] + return model.model_fields if hasattr(model, "__fields__"): return model.__fields__ # type: ignore[return-value] @@ -597,7 +597,7 @@ def create_model_v2( Returns: Type[BaseModel]: The created model. 
""" - field_definitions = cast("dict[str, Any]", field_definitions or {}) # type: ignore[no-redef] + field_definitions = field_definitions or {} if root: if field_definitions: @@ -635,7 +635,7 @@ def create_model_v2( if name.startswith("model"): capture_warnings = True - with warnings.catch_warnings() if capture_warnings else nullcontext(): # type: ignore[attr-defined] + with warnings.catch_warnings() if capture_warnings else nullcontext(): if capture_warnings: warnings.filterwarnings(action="ignore") try: diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml index 27166b28765..ac05a4aa689 100644 --- a/libs/core/pyproject.toml +++ b/libs/core/pyproject.toml @@ -31,10 +31,9 @@ lint = [ "ruff<0.12.0,>=0.11.2", ] typing = [ - "mypy<1.11,>=1.10", + "mypy<1.16,>=1.15", "types-pyyaml<7.0.0.0,>=6.0.12.2", "types-requests<3.0.0.0,>=2.28.11.5", - "types-jinja2<3.0.0,>=2.11.9", "langchain-text-splitters", ] dev = [ @@ -68,11 +67,16 @@ langchain-text-splitters = { path = "../text-splitters" } [tool.mypy] -exclude = [ "notebooks", "examples", "example_data", "langchain_core/pydantic", "tests/unit_tests/utils/test_function_calling.py",] -disallow_untyped_defs = "True" -[[tool.mypy.overrides]] -module = [ "numpy", "pytest",] -ignore_missing_imports = true +strict = "True" +strict_bytes = "True" +enable_error_code = "deprecated" +report_deprecated_as_note = "True" + +# TODO: activate for 'strict' checking +disallow_any_generics = "False" +warn_return_any = "False" +strict_equality = "False" + [tool.ruff] target-version = "py39" diff --git a/libs/core/tests/benchmarks/test_async_callbacks.py b/libs/core/tests/benchmarks/test_async_callbacks.py index b8224a35dd0..508934a64b1 100644 --- a/libs/core/tests/benchmarks/test_async_callbacks.py +++ b/libs/core/tests/benchmarks/test_async_callbacks.py @@ -49,7 +49,7 @@ async def test_async_callbacks_in_sync(benchmark: BenchmarkFixture) -> None: infinite_cycle = cycle([AIMessage(content=" ".join(["hello", "goodbye"] * 500))]) model = GenericFakeChatModel(messages=infinite_cycle) - @benchmark + @benchmark # type: ignore[misc] def sync_callbacks() -> None: for _ in range(5): for _ in model.stream("meow", {"callbacks": [MyCustomAsyncHandler()]}): diff --git a/libs/core/tests/benchmarks/test_imports.py b/libs/core/tests/benchmarks/test_imports.py index 9fdc743cc82..f3b509f0241 100644 --- a/libs/core/tests/benchmarks/test_imports.py +++ b/libs/core/tests/benchmarks/test_imports.py @@ -49,6 +49,6 @@ from pytest_benchmark.fixture import BenchmarkFixture # type: ignore[import-unt ) @pytest.mark.benchmark def test_import_time(benchmark: BenchmarkFixture, import_path: str) -> None: - @benchmark + @benchmark # type: ignore[misc] def import_in_subprocess() -> None: subprocess.run([sys.executable, "-c", import_path], check=False) diff --git a/libs/core/tests/unit_tests/callbacks/test_sync_callback_manager.py b/libs/core/tests/unit_tests/callbacks/test_sync_callback_manager.py index 617d0ac5ee0..d8b3aab6001 100644 --- a/libs/core/tests/unit_tests/callbacks/test_sync_callback_manager.py +++ b/libs/core/tests/unit_tests/callbacks/test_sync_callback_manager.py @@ -1,5 +1,4 @@ -from langchain_core.callbacks.base import BaseCallbackHandler -from langchain_core.callbacks.manager import BaseCallbackManager +from langchain_core.callbacks.base import BaseCallbackHandler, BaseCallbackManager def test_remove_handler() -> None: diff --git a/libs/core/tests/unit_tests/example_selectors/test_similarity.py b/libs/core/tests/unit_tests/example_selectors/test_similarity.py index 
8887d8538e2..4d501790c78 100644 --- a/libs/core/tests/unit_tests/example_selectors/test_similarity.py +++ b/libs/core/tests/unit_tests/example_selectors/test_similarity.py @@ -1,5 +1,5 @@ from collections.abc import Iterable -from typing import Any, Optional, cast +from typing import Any, Optional from typing_extensions import override @@ -132,7 +132,7 @@ def test_from_examples() -> None: assert selector.vectorstore_kwargs == {"vs_foo": "vs_bar"} assert isinstance(selector.vectorstore, DummyVectorStore) - vector_store = cast("DummyVectorStore", selector.vectorstore) + vector_store = selector.vectorstore assert vector_store.embeddings is embeddings assert vector_store.init_arg == "some_init_arg" assert vector_store.texts == ["bar"] @@ -158,7 +158,7 @@ async def test_afrom_examples() -> None: assert selector.vectorstore_kwargs == {"vs_foo": "vs_bar"} assert isinstance(selector.vectorstore, DummyVectorStore) - vector_store = cast("DummyVectorStore", selector.vectorstore) + vector_store = selector.vectorstore assert vector_store.embeddings is embeddings assert vector_store.init_arg == "some_init_arg" assert vector_store.texts == ["bar"] @@ -212,7 +212,7 @@ def test_mmr_from_examples() -> None: assert selector.vectorstore_kwargs == {"vs_foo": "vs_bar"} assert isinstance(selector.vectorstore, DummyVectorStore) - vector_store = cast("DummyVectorStore", selector.vectorstore) + vector_store = selector.vectorstore assert vector_store.embeddings is embeddings assert vector_store.init_arg == "some_init_arg" assert vector_store.texts == ["bar"] @@ -240,7 +240,7 @@ async def test_mmr_afrom_examples() -> None: assert selector.vectorstore_kwargs == {"vs_foo": "vs_bar"} assert isinstance(selector.vectorstore, DummyVectorStore) - vector_store = cast("DummyVectorStore", selector.vectorstore) + vector_store = selector.vectorstore assert vector_store.embeddings is embeddings assert vector_store.init_arg == "some_init_arg" assert vector_store.texts == ["bar"] diff --git a/libs/core/tests/unit_tests/output_parsers/test_list_parser.py b/libs/core/tests/unit_tests/output_parsers/test_list_parser.py index 1b176706e2d..c49d80fae8e 100644 --- a/libs/core/tests/unit_tests/output_parsers/test_list_parser.py +++ b/libs/core/tests/unit_tests/output_parsers/test_list_parser.py @@ -1,5 +1,5 @@ from collections.abc import AsyncIterator, Iterable -from typing import TypeVar, cast +from typing import TypeVar from langchain_core.output_parsers.list import ( CommaSeparatedListOutputParser, @@ -101,7 +101,7 @@ def test_numbered_list() -> None: (text2, ["apple", "banana", "cherry"]), (text3, []), ]: - expectedlist = [[a] for a in cast("list[str]", expected)] + expectedlist = [[a] for a in expected] assert parser.parse(text) == expected assert add(parser.transform(t for t in text)) == (expected or None) assert list(parser.transform(t for t in text)) == expectedlist @@ -137,7 +137,7 @@ def test_markdown_list() -> None: (text2, ["apple", "banana", "cherry"]), (text3, []), ]: - expectedlist = [[a] for a in cast("list[str]", expected)] + expectedlist = [[a] for a in expected] assert parser.parse(text) == expected assert add(parser.transform(t for t in text)) == (expected or None) assert list(parser.transform(t for t in text)) == expectedlist @@ -240,7 +240,7 @@ async def test_numbered_list_async() -> None: (text2, ["apple", "banana", "cherry"]), (text3, []), ]: - expectedlist = [[a] for a in cast("list[str]", expected)] + expectedlist = [[a] for a in expected] assert await parser.aparse(text) == expected assert await 
aadd(parser.atransform(aiter_from_iter(t for t in text))) == ( expected or None @@ -283,7 +283,7 @@ async def test_markdown_list_async() -> None: (text2, ["apple", "banana", "cherry"]), (text3, []), ]: - expectedlist = [[a] for a in cast("list[str]", expected)] + expectedlist = [[a] for a in expected] assert await parser.aparse(text) == expected assert await aadd(parser.atransform(aiter_from_iter(t for t in text))) == ( expected or None diff --git a/libs/core/tests/unit_tests/output_parsers/test_openai_tools.py b/libs/core/tests/unit_tests/output_parsers/test_openai_tools.py index 9fbbd6c446f..c63dd693d74 100644 --- a/libs/core/tests/unit_tests/output_parsers/test_openai_tools.py +++ b/libs/core/tests/unit_tests/output_parsers/test_openai_tools.py @@ -578,7 +578,7 @@ def test_parse_with_different_pydantic_2_proper() -> None: # Can't get pydantic to work here due to the odd typing of tryig to support # both v1 and v2 in the same codebase. - parser = PydanticToolsParser(tools=[Forecast]) # type: ignore[list-item] + parser = PydanticToolsParser(tools=[Forecast]) message = AIMessage( content="", tool_calls=[ @@ -613,7 +613,7 @@ def test_parse_with_different_pydantic_1_proper() -> None: # Can't get pydantic to work here due to the odd typing of tryig to support # both v1 and v2 in the same codebase. - parser = PydanticToolsParser(tools=[Forecast]) # type: ignore[list-item] + parser = PydanticToolsParser(tools=[Forecast]) message = AIMessage( content="", tool_calls=[ diff --git a/libs/core/tests/unit_tests/prompts/test_chat.py b/libs/core/tests/unit_tests/prompts/test_chat.py index 6b4552b078f..922bd6a41aa 100644 --- a/libs/core/tests/unit_tests/prompts/test_chat.py +++ b/libs/core/tests/unit_tests/prompts/test_chat.py @@ -6,7 +6,7 @@ from typing import Any, Union, cast import pytest from packaging import version from pydantic import ValidationError -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from langchain_core._api.deprecation import ( LangChainPendingDeprecationWarning, @@ -15,6 +15,7 @@ from langchain_core.load import dumpd, load from langchain_core.messages import ( AIMessage, BaseMessage, + ChatMessage, HumanMessage, SystemMessage, get_buffer_string, @@ -24,7 +25,6 @@ from langchain_core.prompts import PromptTemplate from langchain_core.prompts.chat import ( AIMessagePromptTemplate, BaseMessagePromptTemplate, - ChatMessage, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, @@ -82,7 +82,7 @@ def chat_prompt_template( """Create a chat prompt template.""" return ChatPromptTemplate( input_variables=["foo", "bar", "context"], - messages=messages, # type: ignore[arg-type] + messages=messages, ) @@ -403,12 +403,12 @@ def test_chat_invalid_input_variables_extra() -> None: ), ): ChatPromptTemplate( - messages=messages, # type: ignore[arg-type] + messages=messages, input_variables=["foo"], - validate_template=True, # type: ignore[arg-type] + validate_template=True, ) assert ( - ChatPromptTemplate(messages=messages, input_variables=["foo"]).input_variables # type: ignore[arg-type] + ChatPromptTemplate(messages=messages, input_variables=["foo"]).input_variables == [] ) @@ -420,19 +420,19 @@ def test_chat_invalid_input_variables_missing() -> None: match=re.escape("Got mismatched input_variables. Expected: {'foo'}. 
Got: []"), ): ChatPromptTemplate( - messages=messages, # type: ignore[arg-type] + messages=messages, input_variables=[], - validate_template=True, # type: ignore[arg-type] + validate_template=True, ) assert ChatPromptTemplate( - messages=messages, # type: ignore[arg-type] - input_variables=[], # type: ignore[arg-type] + messages=messages, + input_variables=[], ).input_variables == ["foo"] def test_infer_variables() -> None: messages = [HumanMessagePromptTemplate.from_template("{foo}")] - prompt = ChatPromptTemplate(messages=messages) # type: ignore[arg-type, call-arg] + prompt = ChatPromptTemplate(messages=messages) assert prompt.input_variables == ["foo"] @@ -443,7 +443,7 @@ def test_chat_valid_with_partial_variables() -> None: ) ] prompt = ChatPromptTemplate( - messages=messages, # type: ignore[arg-type] + messages=messages, input_variables=["question", "context"], partial_variables={"formatins": "some structure"}, ) @@ -457,9 +457,9 @@ def test_chat_valid_infer_variables() -> None: "Do something with {question} using {context} giving it like {formatins}" ) ] - prompt = ChatPromptTemplate( # type: ignore[call-arg] - messages=messages, # type: ignore[arg-type] - partial_variables={"formatins": "some structure"}, # type: ignore[arg-type] + prompt = ChatPromptTemplate( + messages=messages, + partial_variables={"formatins": "some structure"}, ) assert set(prompt.input_variables) == {"question", "context"} assert prompt.partial_variables == {"formatins": "some structure"} diff --git a/libs/core/tests/unit_tests/prompts/test_prompt.py b/libs/core/tests/unit_tests/prompts/test_prompt.py index 82c567c93a7..08a7b81e871 100644 --- a/libs/core/tests/unit_tests/prompts/test_prompt.py +++ b/libs/core/tests/unit_tests/prompts/test_prompt.py @@ -6,7 +6,7 @@ from unittest import mock import pytest from packaging import version -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from langchain_core.prompts.prompt import PromptTemplate from langchain_core.prompts.string import PromptTemplateFormat diff --git a/libs/core/tests/unit_tests/runnables/test_fallbacks.py b/libs/core/tests/unit_tests/runnables/test_fallbacks.py index a3dbc74eee9..3985facc544 100644 --- a/libs/core/tests/unit_tests/runnables/test_fallbacks.py +++ b/libs/core/tests/unit_tests/runnables/test_fallbacks.py @@ -8,7 +8,7 @@ from typing import ( import pytest from pydantic import BaseModel -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from typing_extensions import override from langchain_core.callbacks import CallbackManagerForLLMRun diff --git a/libs/core/tests/unit_tests/runnables/test_graph.py b/libs/core/tests/unit_tests/runnables/test_graph.py index 545db59c8be..387644b8b07 100644 --- a/libs/core/tests/unit_tests/runnables/test_graph.py +++ b/libs/core/tests/unit_tests/runnables/test_graph.py @@ -2,7 +2,7 @@ from typing import Any, Optional from packaging import version from pydantic import BaseModel -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from typing_extensions import override from langchain_core.language_models import FakeListLLM @@ -10,7 +10,8 @@ from langchain_core.output_parsers.list import CommaSeparatedListOutputParser from langchain_core.output_parsers.string import StrOutputParser from langchain_core.output_parsers.xml import XMLOutputParser from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.runnables.base import Runnable, RunnableConfig +from langchain_core.runnables import 
RunnableConfig +from langchain_core.runnables.base import Runnable from langchain_core.runnables.graph import Edge, Graph, Node from langchain_core.runnables.graph_mermaid import _escape_node_label from langchain_core.utils.pydantic import ( diff --git a/libs/core/tests/unit_tests/runnables/test_imports.py b/libs/core/tests/unit_tests/runnables/test_imports.py index e458cca21b2..f0ebf48c446 100644 --- a/libs/core/tests/unit_tests/runnables/test_imports.py +++ b/libs/core/tests/unit_tests/runnables/test_imports.py @@ -40,4 +40,6 @@ def test_all_imports() -> None: def test_imports_for_specific_funcs() -> None: """Test that a few specific imports in more internal namespaces.""" # create_model implementation has been moved to langchain_core.utils.pydantic - from langchain_core.runnables.utils import create_model # noqa: F401 + from langchain_core.runnables.utils import ( # type: ignore[attr-defined] # noqa: F401 + create_model, + ) diff --git a/libs/core/tests/unit_tests/runnables/test_runnable.py b/libs/core/tests/unit_tests/runnables/test_runnable.py index 89cdf2b9695..fa7cd67ccd4 100644 --- a/libs/core/tests/unit_tests/runnables/test_runnable.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable.py @@ -20,7 +20,7 @@ from freezegun import freeze_time from packaging import version from pydantic import BaseModel, Field from pytest_mock import MockerFixture -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from typing_extensions import TypedDict, override from langchain_core.callbacks.manager import ( @@ -557,7 +557,7 @@ def test_lambda_schemas(snapshot: SnapshotAssertion) -> None: } second_lambda = lambda x, y: (x["hello"], x["bye"], y["bah"]) # noqa: E731 - assert RunnableLambda(second_lambda).get_input_jsonschema() == { # type: ignore[arg-type] + assert RunnableLambda(second_lambda).get_input_jsonschema() == { "title": "RunnableLambdaInput", "type": "object", "properties": {"hello": {"title": "Hello"}, "bye": {"title": "Bye"}}, @@ -1012,7 +1012,7 @@ def test_passthrough_tap(mocker: MockerFixture) -> None: seq: Runnable = RunnablePassthrough(mock) | fake | RunnablePassthrough(mock) - assert seq.invoke("hello", my_kwarg="value") == 5 # type: ignore[call-arg] + assert seq.invoke("hello", my_kwarg="value") == 5 assert mock.call_args_list == [ mocker.call("hello", my_kwarg="value"), mocker.call(5), @@ -4488,7 +4488,7 @@ def test_runnable_branch_invoke() -> None: branch = RunnableBranch[int, int]( (lambda x: x > 100, raise_value_error), # mypy cannot infer types from the lambda - (lambda x: x > 0 and x < 5, lambda x: x + 1), # type: ignore[misc] + (lambda x: x > 0 and x < 5, lambda x: x + 1), (lambda x: x > 5, lambda x: x * 10), lambda x: x - 1, ) diff --git a/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py b/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py index 1d3304c768e..000bad74f8d 100644 --- a/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py @@ -2759,9 +2759,7 @@ async def test_custom_event_root_dispatch_with_in_tool() -> None: return x + 1 # Ignoring type due to @tool not returning correct type annotations - events = await _collect_events( - foo.astream_events({"x": 2}, version="v2") # type: ignore[attr-defined] - ) + events = await _collect_events(foo.astream_events({"x": 2}, version="v2")) _assert_events_equal_allow_superset_metadata( events, [ diff --git a/libs/core/tests/unit_tests/runnables/test_tracing_interops.py 
index 67cacadaad4..eaf385cae26 100644
--- a/libs/core/tests/unit_tests/runnables/test_tracing_interops.py
+++ b/libs/core/tests/unit_tests/runnables/test_tracing_interops.py
@@ -163,13 +163,13 @@ async def test_config_traceable_async_handoff() -> None:
     def my_great_grandchild_function(a: int) -> int:
         return my_great_great_grandchild_function(a)

-    @RunnableLambda  # type: ignore[arg-type,attr-defined]
+    @RunnableLambda  # type: ignore[arg-type]
     async def my_grandchild_function(a: int) -> int:
         return my_great_grandchild_function.invoke(a)

     @traceable
     async def my_child_function(a: int) -> int:
-        return await my_grandchild_function.ainvoke(a) * 3  # type: ignore[arg-type,attr-defined]
+        return await my_grandchild_function.ainvoke(a) * 3  # type: ignore[arg-type]

     @traceable()
     async def my_function(a: int) -> int:
@@ -337,7 +337,7 @@ class TestRunnableSequenceParallelTraceNesting:
             assert posts[i]["name"] == name
             dotted_order = posts[i]["dotted_order"]
             if prev_dotted_order is not None and not str(
-                expected_parents[name]
+                expected_parents[name]  # type: ignore[index]
             ).startswith("RunnableParallel"):
                 assert dotted_order > prev_dotted_order, (
                     f"{name} not after {name_order[i - 1]}"
diff --git a/libs/core/tests/unit_tests/test_messages.py b/libs/core/tests/unit_tests/test_messages.py
index f667129539c..ecd940ec499 100644
--- a/libs/core/tests/unit_tests/test_messages.py
+++ b/libs/core/tests/unit_tests/test_messages.py
@@ -435,8 +435,8 @@ def test_message_chunk_to_message() -> None:
     expected = AIMessage(
         content="I am",
         tool_calls=[
-            create_tool_call(name="tool1", args={"a": 1}, id="1"),  # type: ignore[arg-type]
-            create_tool_call(name="tool2", args={}, id="2"),  # type: ignore[arg-type]
+            create_tool_call(name="tool1", args={"a": 1}, id="1"),
+            create_tool_call(name="tool2", args={}, id="2"),
         ],
         invalid_tool_calls=[
             create_invalid_tool_call(name="tool3", args=None, id="3", error=None),
diff --git a/libs/core/tests/unit_tests/test_tools.py b/libs/core/tests/unit_tests/test_tools.py
index db1b09ffa3c..ca6de8f671d 100644
--- a/libs/core/tests/unit_tests/test_tools.py
+++ b/libs/core/tests/unit_tests/test_tools.py
@@ -907,7 +907,7 @@ def test_validation_error_handling_non_validation_error(
         async def _arun(self) -> str:
             return "dummy"

-    _tool = _RaiseNonValidationErrorTool(handle_validation_error=handler)  # type: ignore[call-arg]
+    _tool = _RaiseNonValidationErrorTool(handle_validation_error=handler)
     with pytest.raises(NotImplementedError):
         _tool.run({})

@@ -972,7 +972,7 @@ async def test_async_validation_error_handling_non_validation_error(
         async def _arun(self) -> str:
             return "dummy"

-    _tool = _RaiseNonValidationErrorTool(handle_validation_error=handler)  # type: ignore[call-arg]
+    _tool = _RaiseNonValidationErrorTool(handle_validation_error=handler)
     with pytest.raises(NotImplementedError):
         await _tool.arun({})

@@ -1901,7 +1901,7 @@ def test_args_schema_explicitly_typed() -> None:
         # type ignoring here since we're allowing overriding a type
         # signature of pydantic.v1.BaseModel with pydantic.BaseModel
         # for pydantic 2!
-        args_schema: type[BaseModel] = Foo  # type: ignore[assignment]
+        args_schema: type[BaseModel] = Foo

         def _run(self, *args: Any, **kwargs: Any) -> str:
             return "foo"
@@ -2157,7 +2157,7 @@ def test_tool_annotations_preserved() -> None:
         """Tool docstring."""
         return "foo"

-    schema = my_tool.get_input_schema()  # type: ignore[attr-defined]
+    schema = my_tool.get_input_schema()

     func = my_tool.func  # type: ignore[attr-defined]

diff --git a/libs/core/tests/unit_tests/utils/test_function_calling.py b/libs/core/tests/unit_tests/utils/test_function_calling.py
index 1d2e264153b..126c1753f4e 100644
--- a/libs/core/tests/unit_tests/utils/test_function_calling.py
+++ b/libs/core/tests/unit_tests/utils/test_function_calling.py
@@ -1,4 +1,3 @@
-# mypy: disable-error-code="annotation-unchecked"
 import sys
 import typing
 from collections.abc import Iterable, Mapping, MutableMapping, Sequence
@@ -15,12 +14,11 @@ from typing import TypedDict as TypingTypedDict
 import pytest
 from pydantic import BaseModel as BaseModelV2Maybe  # pydantic: ignore
 from pydantic import Field as FieldV2Maybe  # pydantic: ignore
-from typing_extensions import (
-    TypedDict as ExtensionsTypedDict,
-)
+from typing_extensions import TypeAlias
+from typing_extensions import TypedDict as ExtensionsTypedDict

 try:
-    from typing import Annotated as TypingAnnotated  # type: ignore[attr-defined]
+    from typing import Annotated as TypingAnnotated
 except ImportError:
     TypingAnnotated = ExtensionsAnnotated

@@ -387,8 +385,8 @@ def test_convert_to_openai_function(
     assert actual == runnable_expected

     # Test simple Tool
-    def my_function(input_string: str) -> str:
-        pass
+    def my_function(_: str) -> str:
+        return ""

     tool = Tool(
         name="dummy_function",
@@ -738,20 +736,25 @@ def test_tool_outputs() -> None:
     assert not response.tool_calls


-@pytest.mark.parametrize("use_extension_typed_dict", [True, False])
-@pytest.mark.parametrize("use_extension_annotated", [True, False])
+@pytest.mark.parametrize(
+    "typed_dict",
+    [ExtensionsTypedDict, TypingTypedDict],
+    ids=["typing_extensions.TypedDict", "typing.TypedDict"],
+)
+@pytest.mark.parametrize(
+    "annotated",
+    [ExtensionsAnnotated, TypingAnnotated],
+    ids=["typing_extensions.Annotated", "typing.Annotated"],
+)
 def test__convert_typed_dict_to_openai_function(
-    *, use_extension_typed_dict: bool, use_extension_annotated: bool
+    typed_dict: TypeAlias, annotated: TypeAlias
 ) -> None:
-    typed_dict = ExtensionsTypedDict if use_extension_typed_dict else TypingTypedDict
-    annotated = TypingAnnotated if use_extension_annotated else TypingAnnotated
-
-    class SubTool(typed_dict):
+    class SubTool(typed_dict):  # type: ignore[misc]
         """Subtool docstring."""

         args: annotated[dict[str, Any], {}, "this does bar"]  # noqa: F722

-    class Tool(typed_dict):
+    class Tool(typed_dict):  # type: ignore[misc]
         """Docstring.

         Args:
@@ -981,7 +984,7 @@ def test__convert_typed_dict_to_openai_function(

 @pytest.mark.parametrize("typed_dict", [ExtensionsTypedDict, TypingTypedDict])
 def test__convert_typed_dict_to_openai_function_fail(typed_dict: type) -> None:
-    class Tool(typed_dict):
+    class Tool(typed_dict):  # type: ignore[misc]
         arg1: typing.MutableSet

         # Pydantic 2 supports this, but pydantic v1 does not.
        # Error should be raised since we're using v1 code path here
@@ -994,11 +997,12 @@ def test__convert_typed_dict_to_openai_function_fail(typed_dict: type) -> None:
 )
 def test_convert_union_type_py_39() -> None:
     @tool
-    def magic_function(input: int | str) -> str:  # noqa: FA102
+    def magic_function(value: int | str) -> str:  # type: ignore[syntax,unused-ignore] # noqa: ARG001,FA102
         """Compute a magic function."""
+        return ""

     result = convert_to_openai_function(magic_function)
-    assert result["parameters"]["properties"]["input"] == {
+    assert result["parameters"]["properties"]["value"] == {
         "anyOf": [{"type": "integer"}, {"type": "string"}]
     }

diff --git a/libs/core/tests/unit_tests/utils/test_pydantic.py b/libs/core/tests/unit_tests/utils/test_pydantic.py
index 8cea721c616..88ca45ff1d8 100644
--- a/libs/core/tests/unit_tests/utils/test_pydantic.py
+++ b/libs/core/tests/unit_tests/utils/test_pydantic.py
@@ -56,7 +56,6 @@ def test_pre_init_decorator_with_more_defaults() -> None:
         return v

     # Try to create an instance of Foo
-    # nothing is required, but mypy can't track the default for `c`
     Foo()


@@ -177,7 +176,7 @@ def test_fields_pydantic_v1() -> None:
         x: int

     fields = get_fields(Foo)
-    assert fields == {"x": Foo.model_fields["x"]}  # type: ignore[index]
+    assert fields == {"x": Foo.model_fields["x"]}


 @pytest.mark.skipif(not IS_PYDANTIC_V2, reason="Only tests Pydantic v2")
diff --git a/libs/core/uv.lock b/libs/core/uv.lock
index c15e021fe1d..99db2569571 100644
--- a/libs/core/uv.lock
+++ b/libs/core/uv.lock
@@ -980,7 +980,6 @@ test = [
 typing = [
     { name = "langchain-text-splitters" },
     { name = "mypy" },
-    { name = "types-jinja2" },
     { name = "types-pyyaml" },
     { name = "types-requests" },
 ]
@@ -1025,8 +1024,7 @@ test = [
 test-integration = []
 typing = [
     { name = "langchain-text-splitters", directory = "../text-splitters" },
-    { name = "mypy", specifier = ">=1.10,<1.11" },
-    { name = "types-jinja2", specifier = ">=2.11.9,<3.0.0" },
+    { name = "mypy", specifier = ">=1.15,<1.16" },
     { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
     { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
 ]
@@ -1244,36 +1242,46 @@ wheels = [

 [[package]]
 name = "mypy"
-version = "1.10.1"
+version = "1.15.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "mypy-extensions" },
     { name = "tomli", marker = "python_full_version < '3.11'" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/c7/b9/81e4c6dbb1ec1e72503de3ff2c5fe4b7f224e04613b670f8b9004cd8a4dd/mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0", size = 3022304 }
+sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/56/24/8a179de3ed98e1882f640d431db25a17fc5813258fded79674e475501f87/mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02", size = 10817694 },
-    { url = "https://files.pythonhosted.org/packages/f3/80/1675d07cfb4cc12bedcb5bb426f256d8c8da3668cbf300121e39333f0c96/mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7", size = 9975362 },
-    { url =
"https://files.pythonhosted.org/packages/d5/a0/684ebd636f258bdd263b12be46dd4e1ed33ac73a76d590b209c026e3c65f/mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a", size = 12728032 }, - { url = "https://files.pythonhosted.org/packages/f3/c8/1f881f08e93ea8165113ab0fad490262b0466d0c2616c13c1bb85741ff87/mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9", size = 12797751 }, - { url = "https://files.pythonhosted.org/packages/4a/d3/46c81d90576e2e766144c0e436fa397a7387092fe29c6ef964f91d92778d/mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d", size = 9398835 }, - { url = "https://files.pythonhosted.org/packages/38/cf/0645128c6edf70eb9b9687ad42fcb61ea344a7927ed2b78ce2275282fe87/mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a", size = 10740526 }, - { url = "https://files.pythonhosted.org/packages/19/c9/10842953066265e6063c41a85bbee3b877501947c970ea84a1db5f11d32e/mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84", size = 9898375 }, - { url = "https://files.pythonhosted.org/packages/e4/9e/551e897f67c5d67aa1976bc3b4951f297d1daf07250c421bb045b2613350/mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f", size = 12602338 }, - { url = "https://files.pythonhosted.org/packages/2b/a4/55e3635253e5fa7051674dd5a67582f08b0ba8823e1fdbf7241ed5b32d4e/mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b", size = 12680741 }, - { url = "https://files.pythonhosted.org/packages/7a/cc/aa881ad051f99915887db0b5de8facc0e224295be22f92178c8f77fd8359/mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e", size = 9393661 }, - { url = "https://files.pythonhosted.org/packages/5d/86/3c3bdaccc3cbd1372acb15667a2c2cb773523a710a22e2748cbda9a7c1e2/mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7", size = 10864022 }, - { url = "https://files.pythonhosted.org/packages/ec/05/7c87b26b6a769b70f6c0b8a6daef01fc6f3ae566df89a2fa9d04f690b0d3/mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3", size = 9857795 }, - { url = "https://files.pythonhosted.org/packages/ff/b5/cbccba4dca9703c4c467171e7f61ea6a1a75eae991208aa5bc7d49807f91/mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e", size = 12647633 }, - { url = "https://files.pythonhosted.org/packages/02/3c/1f5e57c8cfab4299f7189821ae8bb4896e8e623a04d293fd32e32eb0e617/mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04", size = 12730251 }, - { url = "https://files.pythonhosted.org/packages/f9/20/d33608e8dc3bc0f5966fc1f6c2d16671f0725dcca279beec47c3e19afd9d/mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31", size = 9491734 }, - { url = 
"https://files.pythonhosted.org/packages/50/00/86bb2f6c5b58fc6f360dd4cb5c0666dc67c05007c2cddcc694528a59a604/mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3", size = 10813338 }, - { url = "https://files.pythonhosted.org/packages/33/b0/20c9f6dcbfb312d1804a81f4a39e0b401fe614dc0de580249b6deb07d053/mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf", size = 9968141 }, - { url = "https://files.pythonhosted.org/packages/aa/87/ec65c45b2e5160203a680b0f79e459fbe9c192f9eff501c3fec82bef3be5/mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531", size = 12724186 }, - { url = "https://files.pythonhosted.org/packages/c5/5c/f61c876647036d572a1434f0251257a04f2a7bd038c718b28fa5ca699515/mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3", size = 12796640 }, - { url = "https://files.pythonhosted.org/packages/69/09/5cd3609d18cf27ec84f380a92b3da695f84781de119d68db69d274c3d1ee/mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f", size = 9396105 }, - { url = "https://files.pythonhosted.org/packages/2b/ee/d53a3d4792a09b6cd757978951d6dcf8b10825a8b8522b68e9b5eb53b9a1/mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a", size = 2580108 }, + { url = "https://files.pythonhosted.org/packages/68/f8/65a7ce8d0e09b6329ad0c8d40330d100ea343bd4dd04c4f8ae26462d0a17/mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13", size = 10738433 }, + { url = "https://files.pythonhosted.org/packages/b4/95/9c0ecb8eacfe048583706249439ff52105b3f552ea9c4024166c03224270/mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559", size = 9861472 }, + { url = "https://files.pythonhosted.org/packages/84/09/9ec95e982e282e20c0d5407bc65031dfd0f0f8ecc66b69538296e06fcbee/mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b", size = 11611424 }, + { url = "https://files.pythonhosted.org/packages/78/13/f7d14e55865036a1e6a0a69580c240f43bc1f37407fe9235c0d4ef25ffb0/mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3", size = 12365450 }, + { url = "https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b", size = 12551765 }, + { url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828", size = 9274701 }, + { url = "https://files.pythonhosted.org/packages/03/bc/f6339726c627bd7ca1ce0fa56c9ae2d0144604a319e0e339bdadafbbb599/mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f", size = 10662338 }, + { url = 
"https://files.pythonhosted.org/packages/e2/90/8dcf506ca1a09b0d17555cc00cd69aee402c203911410136cd716559efe7/mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5", size = 9787540 }, + { url = "https://files.pythonhosted.org/packages/05/05/a10f9479681e5da09ef2f9426f650d7b550d4bafbef683b69aad1ba87457/mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e", size = 11538051 }, + { url = "https://files.pythonhosted.org/packages/e9/9a/1f7d18b30edd57441a6411fcbc0c6869448d1a4bacbaee60656ac0fc29c8/mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c", size = 12286751 }, + { url = "https://files.pythonhosted.org/packages/72/af/19ff499b6f1dafcaf56f9881f7a965ac2f474f69f6f618b5175b044299f5/mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f", size = 12421783 }, + { url = "https://files.pythonhosted.org/packages/96/39/11b57431a1f686c1aed54bf794870efe0f6aeca11aca281a0bd87a5ad42c/mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f", size = 9265618 }, + { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981 }, + { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175 }, + { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675 }, + { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020 }, + { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582 }, + { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614 }, + { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592 }, + { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611 }, + { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443 }, + { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541 }, + { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348 }, + { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648 }, + { url = "https://files.pythonhosted.org/packages/5a/fa/79cf41a55b682794abe71372151dbbf856e3008f6767057229e6649d294a/mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078", size = 10737129 }, + { url = "https://files.pythonhosted.org/packages/d3/33/dd8feb2597d648de29e3da0a8bf4e1afbda472964d2a4a0052203a6f3594/mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba", size = 9856335 }, + { url = "https://files.pythonhosted.org/packages/e4/b5/74508959c1b06b96674b364ffeb7ae5802646b32929b7701fc6b18447592/mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5", size = 11611935 }, + { url = "https://files.pythonhosted.org/packages/6c/53/da61b9d9973efcd6507183fdad96606996191657fe79701b2c818714d573/mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b", size = 12365827 }, + { url = "https://files.pythonhosted.org/packages/c1/72/965bd9ee89540c79a25778cc080c7e6ef40aa1eeac4d52cec7eae6eb5228/mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2", size = 12541924 }, + { url = "https://files.pythonhosted.org/packages/46/d0/f41645c2eb263e6c77ada7d76f894c580c9ddb20d77f0c24d34273a4dab2/mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980", size = 9271176 }, + { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777 }, ] [[package]] @@ -2569,27 +2577,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 }, ] -[[package]] -name = "types-jinja2" -version = "2.11.9" -source = { 
registry = "https://pypi.org/simple" } -dependencies = [ - { name = "types-markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/46/c4/b82309bfed8195de7997672deac301bd6f5bd5cbb6a3e392b7fe780d7852/types-Jinja2-2.11.9.tar.gz", hash = "sha256:dbdc74a40aba7aed520b7e4d89e8f0fe4286518494208b35123bcf084d4b8c81", size = 13302 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b0/e79d84748f1d34304f13191424348a719c3febaa3493835370fe9528e1e6/types_Jinja2-2.11.9-py3-none-any.whl", hash = "sha256:60a1e21e8296979db32f9374d8a239af4cb541ff66447bb915d8ad398f9c63b2", size = 18190 }, -] - -[[package]] -name = "types-markupsafe" -version = "1.1.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/39/31/b5f059142d058aec41e913d8e0eff0a967e7bc46f9a2ba2f31bc11cff059/types-MarkupSafe-1.1.10.tar.gz", hash = "sha256:85b3a872683d02aea3a5ac2a8ef590193c344092032f58457287fbf8e06711b1", size = 2986 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/d6/b8effb1c48539260a5eb4196afc55efac4ea1684a4991977555eb266b2ef/types_MarkupSafe-1.1.10-py3-none-any.whl", hash = "sha256:ca2bee0f4faafc45250602567ef38d533e877d2ddca13003b319c551ff5b3cc5", size = 3998 }, -] - [[package]] name = "types-python-dateutil" version = "2.9.0.20241206"