diff --git a/libs/core/langchain_core/language_models/chat_models.py b/libs/core/langchain_core/language_models/chat_models.py
index 05af1f9ac60..28963f8a945 100644
--- a/libs/core/langchain_core/language_models/chat_models.py
+++ b/libs/core/langchain_core/language_models/chat_models.py
@@ -1,9 +1,9 @@
 from __future__ import annotations
 
 import asyncio
+import builtins
 import inspect
 import json
-import typing
 import uuid
 import warnings
 from abc import ABC, abstractmethod
@@ -1103,20 +1103,18 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
 
     def bind_tools(
         self,
-        tools: Sequence[
-            Union[typing.Dict[str, Any], type, Callable, BaseTool]  # noqa: UP006
-        ],
+        tools: Sequence[Union[builtins.dict[str, Any], type, Callable, BaseTool]],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         raise NotImplementedError()
 
     def with_structured_output(
         self,
-        schema: Union[typing.Dict, type],  # noqa: UP006
+        schema: Union[builtins.dict, type],
         *,
         include_raw: bool = False,
         **kwargs: Any,
-    ) -> Runnable[LanguageModelInput, Union[typing.Dict, BaseModel]]:  # noqa: UP006
+    ) -> Runnable[LanguageModelInput, Union[builtins.dict, BaseModel]]:
         """Model wrapper that returns outputs formatted to match the given schema.
 
         Args:
diff --git a/libs/core/langchain_core/prompts/base.py b/libs/core/langchain_core/prompts/base.py
index f6932d6060a..f6e5e063f5c 100644
--- a/libs/core/langchain_core/prompts/base.py
+++ b/libs/core/langchain_core/prompts/base.py
@@ -2,7 +2,6 @@ from __future__ import annotations
 
 import contextlib
 import json
-import typing
 from abc import ABC, abstractmethod
 from collections.abc import Mapping
 from functools import cached_property
@@ -51,7 +50,7 @@ class BasePromptTemplate(
     """optional_variables: A list of the names of the variables for placeholder
        or MessagePlaceholder that are optional. These variables are auto inferred
        from the prompt and user need not provide them."""
-    input_types: typing.Dict[str, Any] = Field(default_factory=dict, exclude=True)  # noqa: UP006
+    input_types: dict[str, Any] = Field(default_factory=dict, exclude=True)
     """A dictionary of the types of the variables the prompt template expects.
     If not provided, all variables are assumed to be strings."""
     output_parser: Optional[BaseOutputParser] = None
@@ -61,7 +60,7 @@ class BasePromptTemplate(
 
     Partial variables populate the template so that you don't
     need to pass them in every time you call the prompt."""
-    metadata: Optional[typing.Dict[str, Any]] = None  # noqa: UP006
+    metadata: Optional[dict[str, Any]] = None
     """Metadata to be used for tracing."""
     tags: Optional[list[str]] = None
     """Tags to be used for tracing."""
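
For context, the diff above only swaps typing.Dict for the builtin dict in annotations, so call sites are unaffected. The following is a minimal sketch of how the retyped with_structured_output is typically exercised with a plain dict schema; ChatOpenAI and the joke schema are illustrative assumptions and are not part of this diff.

# Illustrative only — not part of this diff. A minimal sketch showing that
# with_structured_output(schema: Union[dict, type]) still accepts a plain
# builtin dict (a JSON schema). ChatOpenAI is an assumed concrete
# BaseChatModel subclass used purely for demonstration.
from typing import Any

from langchain_openai import ChatOpenAI

joke_schema: dict[str, Any] = {
    "title": "Joke",
    "description": "A joke to tell the user.",
    "type": "object",
    "properties": {
        "setup": {"type": "string", "description": "The setup of the joke"},
        "punchline": {"type": "string", "description": "The punchline"},
    },
    "required": ["setup", "punchline"],
}

model = ChatOpenAI(model="gpt-4o-mini")
structured_model = model.with_structured_output(joke_schema)
# With a dict schema the result is a dict, matching the
# Union[builtins.dict, BaseModel] return annotation above.
result = structured_model.invoke("Tell me a joke about cats")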