core[performance]: use custom __getattr__ in __init__.py files for lazy imports (#30769)

Most easily reviewed with the "hide whitespace" diff option enabled.

We're seeing 10-50% speed-ups in import time for common structures 🚀

The general purpose of this PR is to lazily import structures within
`langchain_core.XXX_module.__init__.py` files so that we're not eagerly
importing expensive dependencies (`pydantic`, `requests`, etc.).
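
In condensed form, every touched `__init__.py` now exposes its public names to type checkers only and resolves them on first attribute access via a module-level `__getattr__` (PEP 562), caching the result in the module's globals:

```python
# Condensed illustration of the pattern applied in each __init__.py below.
from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers and IDEs; never executed at runtime.
    from langchain_core.messages.human import HumanMessage

__all__ = ["HumanMessage"]

# Maps each exported name to the submodule that defines it.
_dynamic_imports = {"HumanMessage": "human"}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    # Cache on the package so __getattr__ only runs once per name.
    globals()[attr_name] = result
    return result
```

The submodule is only imported the first time someone actually asks for the name, so importing the package `__init__` itself stays cheap.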

Analysis of flamegraphs generated with `importtime` motivated these
changes. For example, the one below demonstrates that importing
`HumanMessage` accidentally triggered imports for `importlib.metadata`,
`requests`, etc.

There's still much more to do on this front, and we can start digging
into our own internal code for optimizations now that we're less
concerned about external imports.

<img width="1210" alt="Screenshot 2025-04-11 at 1 10 54 PM"
src="https://github.com/user-attachments/assets/112a3fe7-24a9-4294-92c1-d5ae64df839e"
/>
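
For reference, a profile like this can be reproduced with CPython's built-in `-X importtime` flag; the exact tooling behind the flamegraph isn't shown here, so treat the snippet below as a sketch:

```python
# Sketch: capture per-module import times for one symbol in a fresh interpreter.
# `-X importtime` prints self/cumulative import times to stderr; visualizers
# such as tuna can turn that output into a flamegraph-style view.
import subprocess
import sys

proc = subprocess.run(
    [
        sys.executable,
        "-X",
        "importtime",
        "-c",
        "from langchain_core.messages import HumanMessage",
    ],
    capture_output=True,
    text=True,
    check=True,
)
print(proc.stderr)
```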

I've tracked the improvements with some local benchmarks:

## `pytest-benchmark` results

| Name                 | Before (s) | After (s) | Delta (s) | % Change |
|----------------------|------------|-----------|-----------|----------|
| Document             | 2.8683     | 1.2775    | -1.5908   | -55.46%  |
| HumanMessage         | 2.2358     | 1.1673    | -1.0685   | -47.79%  |
| ChatPromptTemplate   | 5.5235     | 2.9709    | -2.5526   | -46.22%  |
| Runnable             | 2.9423     | 1.7793    | -1.1630   | -39.53%  |
| InMemoryVectorStore  | 3.1180     | 1.8417    | -1.2763   | -40.93%  |
| RunnableLambda       | 2.7385     | 1.8745    | -0.8640   | -31.55%  |
| tool                 | 5.1231     | 4.0771    | -1.0460   | -20.42%  |
| CallbackManager      | 4.2263     | 3.4099    | -0.8164   | -19.32%  |
| LangChainTracer      | 3.8394     | 3.3101    | -0.5293   | -13.79%  |
| BaseChatModel        | 4.3317     | 3.8806    | -0.4511   | -10.41%  |
| PydanticOutputParser | 3.2036     | 3.2995    | +0.0959   | +2.99%   |
| InMemoryRateLimiter  | 0.5311     | 0.5995    | +0.0684   | +12.88%  |

Note: the apparent regressions for `InMemoryRateLimiter` and
`PydanticOutputParser` are just random noise; I'm getting comparable numbers
across repeated local runs.
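
For context, the benchmarks are roughly shaped like the sketch below (the actual suite isn't included in this description); each round spawns a fresh interpreter so that `sys.modules` caching doesn't hide the import cost:

```python
# Rough sketch of an import-time benchmark using the pytest-benchmark fixture.
import subprocess
import sys


def _import_in_fresh_interpreter(statement: str) -> None:
    # A new process per round; otherwise later rounds hit already-imported
    # modules in sys.modules and measure essentially nothing.
    subprocess.run([sys.executable, "-c", statement], check=True)


def test_import_human_message(benchmark) -> None:
    benchmark(
        _import_in_fresh_interpreter,
        "from langchain_core.messages import HumanMessage",
    )
```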

## Local CodSpeed results

We're still working on configuring CodSpeed on CI; local runs produced
similar results.
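
Independent of the timing numbers, the lazy behavior itself is easy to spot-check (a quick sanity check, not part of the changeset):

```python
# With the __getattr__ indirection, importing the package __init__ no longer
# pulls in its submodules eagerly.
import sys

import langchain_core.messages  # runs only the lightweight package __init__

# Nothing has asked for HumanMessage yet, so its submodule isn't loaded
# (assuming nothing else in the process imported it first).
assert "langchain_core.messages.human" not in sys.modules

# First attribute access triggers the module-level __getattr__.
from langchain_core.messages import HumanMessage  # noqa: E402

assert "langchain_core.messages.human" in sys.modules
```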
Sydney Runkle 2025-04-14 08:57:54 -04:00 committed by GitHub
parent ada740b5b9
commit 4f69094b51
17 changed files with 949 additions and 292 deletions

langchain_core/_api/__init__.py

@@ -9,20 +9,24 @@ This module is only relevant for LangChain developers, not for users.
"""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .beta_decorator import (
        LangChainBetaWarning,
        beta,
        suppress_langchain_beta_warning,
        surface_langchain_beta_warnings,
    )
    from .deprecation import (
        LangChainDeprecationWarning,
        deprecated,
        suppress_langchain_deprecation_warning,
        surface_langchain_deprecation_warnings,
        warn_deprecated,
    )
    from .path import as_import_path, get_relative_path

__all__ = [
    "as_import_path",
@@ -37,3 +41,33 @@ __all__ = [
    "surface_langchain_deprecation_warnings",
    "warn_deprecated",
]

_dynamic_imports = {
    "LangChainBetaWarning": "beta_decorator",
    "beta": "beta_decorator",
    "suppress_langchain_beta_warning": "beta_decorator",
    "surface_langchain_beta_warnings": "beta_decorator",
    "as_import_path": "path",
    "get_relative_path": "path",
    "LangChainDeprecationWarning": "deprecation",
    "deprecated": "deprecation",
    "surface_langchain_deprecation_warnings": "deprecation",
    "suppress_langchain_deprecation_warning": "deprecation",
    "warn_deprecated": "deprecation",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/callbacks/__init__.py

@@ -7,46 +7,50 @@
BaseCallbackHandler --> <name>CallbackHandler  # Example: AimCallbackHandler
"""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.callbacks.base import (
        AsyncCallbackHandler,
        BaseCallbackHandler,
        BaseCallbackManager,
        CallbackManagerMixin,
        Callbacks,
        ChainManagerMixin,
        LLMManagerMixin,
        RetrieverManagerMixin,
        RunManagerMixin,
        ToolManagerMixin,
    )
    from langchain_core.callbacks.file import FileCallbackHandler
    from langchain_core.callbacks.manager import (
        AsyncCallbackManager,
        AsyncCallbackManagerForChainGroup,
        AsyncCallbackManagerForChainRun,
        AsyncCallbackManagerForLLMRun,
        AsyncCallbackManagerForRetrieverRun,
        AsyncCallbackManagerForToolRun,
        AsyncParentRunManager,
        AsyncRunManager,
        BaseRunManager,
        CallbackManager,
        CallbackManagerForChainGroup,
        CallbackManagerForChainRun,
        CallbackManagerForLLMRun,
        CallbackManagerForRetrieverRun,
        CallbackManagerForToolRun,
        ParentRunManager,
        RunManager,
        adispatch_custom_event,
        dispatch_custom_event,
    )
    from langchain_core.callbacks.stdout import StdOutCallbackHandler
    from langchain_core.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
    from langchain_core.callbacks.usage import (
        UsageMetadataCallbackHandler,
        get_usage_metadata_callback,
    )

__all__ = [
    "dispatch_custom_event",
@@ -84,3 +88,56 @@ __all__ = [
    "UsageMetadataCallbackHandler",
    "get_usage_metadata_callback",
]

_dynamic_imports = {
    "AsyncCallbackHandler": "base",
    "BaseCallbackHandler": "base",
    "BaseCallbackManager": "base",
    "CallbackManagerMixin": "base",
    "Callbacks": "base",
    "ChainManagerMixin": "base",
    "LLMManagerMixin": "base",
    "RetrieverManagerMixin": "base",
    "RunManagerMixin": "base",
    "ToolManagerMixin": "base",
    "FileCallbackHandler": "file",
    "AsyncCallbackManager": "manager",
    "AsyncCallbackManagerForChainGroup": "manager",
    "AsyncCallbackManagerForChainRun": "manager",
    "AsyncCallbackManagerForLLMRun": "manager",
    "AsyncCallbackManagerForRetrieverRun": "manager",
    "AsyncCallbackManagerForToolRun": "manager",
    "AsyncParentRunManager": "manager",
    "AsyncRunManager": "manager",
    "BaseRunManager": "manager",
    "CallbackManager": "manager",
    "CallbackManagerForChainGroup": "manager",
    "CallbackManagerForChainRun": "manager",
    "CallbackManagerForLLMRun": "manager",
    "CallbackManagerForRetrieverRun": "manager",
    "CallbackManagerForToolRun": "manager",
    "ParentRunManager": "manager",
    "RunManager": "manager",
    "adispatch_custom_event": "manager",
    "dispatch_custom_event": "manager",
    "StdOutCallbackHandler": "stdout",
    "StreamingStdOutCallbackHandler": "streaming_stdout",
    "UsageMetadataCallbackHandler": "usage",
    "get_usage_metadata_callback": "usage",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/document_loaders/__init__.py

@@ -1,8 +1,12 @@
"""Document loaders."""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.document_loaders.base import BaseBlobParser, BaseLoader
    from langchain_core.document_loaders.blob_loaders import Blob, BlobLoader, PathLike
    from langchain_core.document_loaders.langsmith import LangSmithLoader

__all__ = [
    "BaseBlobParser",
@@ -12,3 +16,28 @@ __all__ = [
    "PathLike",
    "LangSmithLoader",
]

_dynamic_imports = {
    "BaseBlobParser": "base",
    "BaseLoader": "base",
    "Blob": "blob_loaders",
    "BlobLoader": "blob_loaders",
    "PathLike": "blob_loaders",
    "LangSmithLoader": "langsmith",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/documents/__init__.py

@@ -5,8 +5,34 @@ and their transformations.
"""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .base import Document
    from .compressor import BaseDocumentCompressor
    from .transformers import BaseDocumentTransformer

__all__ = ["Document", "BaseDocumentTransformer", "BaseDocumentCompressor"]

_dynamic_imports = {
    "Document": "base",
    "BaseDocumentCompressor": "compressor",
    "BaseDocumentTransformer": "transformers",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/embeddings/__init__.py

@@ -1,6 +1,35 @@
"""Embeddings."""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.embeddings.embeddings import Embeddings
    from langchain_core.embeddings.fake import (
        DeterministicFakeEmbedding,
        FakeEmbeddings,
    )

__all__ = ["DeterministicFakeEmbedding", "Embeddings", "FakeEmbeddings"]

_dynamic_imports = {
    "Embeddings": "embeddings",
    "DeterministicFakeEmbedding": "fake",
    "FakeEmbeddings": "fake",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/example_selectors/__init__.py

@@ -4,15 +4,19 @@
This allows us to select examples that are most relevant to the input.
"""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.example_selectors.base import BaseExampleSelector
    from langchain_core.example_selectors.length_based import (
        LengthBasedExampleSelector,
    )
    from langchain_core.example_selectors.semantic_similarity import (
        MaxMarginalRelevanceExampleSelector,
        SemanticSimilarityExampleSelector,
        sorted_values,
    )

__all__ = [
    "BaseExampleSelector",
@@ -21,3 +25,27 @@ __all__ = [
    "SemanticSimilarityExampleSelector",
    "sorted_values",
]

_dynamic_imports = {
    "BaseExampleSelector": "base",
    "LengthBasedExampleSelector": "length_based",
    "MaxMarginalRelevanceExampleSelector": "semantic_similarity",
    "SemanticSimilarityExampleSelector": "semantic_similarity",
    "sorted_values": "semantic_similarity",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/indexing/__init__.py

@@ -5,14 +5,18 @@ a vectorstore while avoiding duplicated content and over-writing content
if it's unchanged.
"""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.indexing.api import IndexingResult, aindex, index
    from langchain_core.indexing.base import (
        DeleteResponse,
        DocumentIndex,
        InMemoryRecordManager,
        RecordManager,
        UpsertResponse,
    )

__all__ = [
    "aindex",
@@ -24,3 +28,30 @@ __all__ = [
    "RecordManager",
    "UpsertResponse",
]

_dynamic_imports = {
    "aindex": "api",
    "index": "api",
    "IndexingResult": "api",
    "DeleteResponse": "base",
    "DocumentIndex": "base",
    "InMemoryRecordManager": "base",
    "RecordManager": "base",
    "UpsertResponse": "base",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/language_models/__init__.py

@@ -41,23 +41,30 @@ https://python.langchain.com/docs/how_to/custom_llm/
"""  # noqa: E501

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.language_models.base import (
        BaseLanguageModel,
        LangSmithParams,
        LanguageModelInput,
        LanguageModelLike,
        LanguageModelOutput,
        get_tokenizer,
    )
    from langchain_core.language_models.chat_models import (
        BaseChatModel,
        SimpleChatModel,
    )
    from langchain_core.language_models.fake import FakeListLLM, FakeStreamingListLLM
    from langchain_core.language_models.fake_chat_models import (
        FakeListChatModel,
        FakeMessagesListChatModel,
        GenericFakeChatModel,
        ParrotFakeChatModel,
    )
    from langchain_core.language_models.llms import LLM, BaseLLM

__all__ = [
    "BaseLanguageModel",
@@ -77,3 +84,38 @@ __all__ = [
    "GenericFakeChatModel",
    "ParrotFakeChatModel",
]

_dynamic_imports = {
    "BaseLanguageModel": "base",
    "LangSmithParams": "base",
    "LanguageModelInput": "base",
    "LanguageModelLike": "base",
    "LanguageModelOutput": "base",
    "get_tokenizer": "base",
    "BaseChatModel": "chat_models",
    "SimpleChatModel": "chat_models",
    "FakeListLLM": "fake",
    "FakeStreamingListLLM": "fake",
    "FakeListChatModel": "fake_chat_models",
    "FakeMessagesListChatModel": "fake_chat_models",
    "GenericFakeChatModel": "fake_chat_models",
    "ParrotFakeChatModel": "fake_chat_models",
    "LLM": "llms",
    "BaseLLM": "llms",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/load/__init__.py

@@ -1,7 +1,35 @@
"""**Load** module helps with serialization and deserialization."""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.load.dump import dumpd, dumps
    from langchain_core.load.load import load, loads
    from langchain_core.load.serializable import Serializable

__all__ = ["dumpd", "dumps", "load", "loads", "Serializable"]

_dynamic_imports = {
    "dumpd": "dump",
    "dumps": "dump",
    "load": "load",
    "loads": "load",
    "Serializable": "serializable",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/messages/__init__.py

@@ -15,42 +15,46 @@
"""  # noqa: E501

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.messages.ai import (
        AIMessage,
        AIMessageChunk,
    )
    from langchain_core.messages.base import (
        BaseMessage,
        BaseMessageChunk,
        merge_content,
        message_to_dict,
        messages_to_dict,
    )
    from langchain_core.messages.chat import ChatMessage, ChatMessageChunk
    from langchain_core.messages.function import FunctionMessage, FunctionMessageChunk
    from langchain_core.messages.human import HumanMessage, HumanMessageChunk
    from langchain_core.messages.modifier import RemoveMessage
    from langchain_core.messages.system import SystemMessage, SystemMessageChunk
    from langchain_core.messages.tool import (
        InvalidToolCall,
        ToolCall,
        ToolCallChunk,
        ToolMessage,
        ToolMessageChunk,
    )
    from langchain_core.messages.utils import (
        AnyMessage,
        MessageLikeRepresentation,
        _message_from_dict,
        convert_to_messages,
        convert_to_openai_messages,
        filter_messages,
        get_buffer_string,
        merge_message_runs,
        message_chunk_to_message,
        messages_from_dict,
        trim_messages,
    )

__all__ = [
    "AIMessage",
@@ -86,3 +90,54 @@ __all__ = [
    "trim_messages",
    "convert_to_openai_messages",
]

_dynamic_imports = {
    "AIMessage": "ai",
    "AIMessageChunk": "ai",
    "BaseMessage": "base",
    "BaseMessageChunk": "base",
    "merge_content": "base",
    "message_to_dict": "base",
    "messages_to_dict": "base",
    "ChatMessage": "chat",
    "ChatMessageChunk": "chat",
    "FunctionMessage": "function",
    "FunctionMessageChunk": "function",
    "HumanMessage": "human",
    "HumanMessageChunk": "human",
    "RemoveMessage": "modifier",
    "SystemMessage": "system",
    "SystemMessageChunk": "system",
    "InvalidToolCall": "tool",
    "ToolCall": "tool",
    "ToolCallChunk": "tool",
    "ToolMessage": "tool",
    "ToolMessageChunk": "tool",
    "AnyMessage": "utils",
    "MessageLikeRepresentation": "utils",
    "_message_from_dict": "utils",
    "convert_to_messages": "utils",
    "convert_to_openai_messages": "utils",
    "filter_messages": "utils",
    "get_buffer_string": "utils",
    "merge_message_runs": "utils",
    "message_chunk_to_message": "utils",
    "messages_from_dict": "utils",
    "trim_messages": "utils",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/output_parsers/__init__.py

@@ -13,30 +13,37 @@
Serializable, Generation, PromptValue
"""  # noqa: E501

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.output_parsers.base import (
        BaseGenerationOutputParser,
        BaseLLMOutputParser,
        BaseOutputParser,
    )
    from langchain_core.output_parsers.json import (
        JsonOutputParser,
        SimpleJsonOutputParser,
    )
    from langchain_core.output_parsers.list import (
        CommaSeparatedListOutputParser,
        ListOutputParser,
        MarkdownListOutputParser,
        NumberedListOutputParser,
    )
    from langchain_core.output_parsers.openai_tools import (
        JsonOutputKeyToolsParser,
        JsonOutputToolsParser,
        PydanticToolsParser,
    )
    from langchain_core.output_parsers.pydantic import PydanticOutputParser
    from langchain_core.output_parsers.string import StrOutputParser
    from langchain_core.output_parsers.transform import (
        BaseCumulativeTransformOutputParser,
        BaseTransformOutputParser,
    )
    from langchain_core.output_parsers.xml import XMLOutputParser

__all__ = [
    "BaseLLMOutputParser",
@@ -57,3 +64,39 @@ __all__ = [
    "JsonOutputKeyToolsParser",
    "PydanticToolsParser",
]

_dynamic_imports = {
    "BaseLLMOutputParser": "base",
    "BaseGenerationOutputParser": "base",
    "BaseOutputParser": "base",
    "JsonOutputParser": "json",
    "SimpleJsonOutputParser": "json",
    "ListOutputParser": "list",
    "CommaSeparatedListOutputParser": "list",
    "MarkdownListOutputParser": "list",
    "NumberedListOutputParser": "list",
    "JsonOutputKeyToolsParser": "openai_tools",
    "JsonOutputToolsParser": "openai_tools",
    "PydanticToolsParser": "openai_tools",
    "PydanticOutputParser": "pydantic",
    "StrOutputParser": "string",
    "BaseTransformOutputParser": "transform",
    "BaseCumulativeTransformOutputParser": "transform",
    "XMLOutputParser": "xml",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/outputs/__init__.py

@@ -21,11 +21,18 @@ in the AIMessage object, it is recommended to access it from there rather than
from the `LLMResult` object.
"""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.outputs.chat_generation import (
        ChatGeneration,
        ChatGenerationChunk,
    )
    from langchain_core.outputs.chat_result import ChatResult
    from langchain_core.outputs.generation import Generation, GenerationChunk
    from langchain_core.outputs.llm_result import LLMResult
    from langchain_core.outputs.run_info import RunInfo

__all__ = [
    "ChatGeneration",
@@ -36,3 +43,29 @@ __all__ = [
    "LLMResult",
    "RunInfo",
]

_dynamic_imports = {
    "ChatGeneration": "chat_generation",
    "ChatGenerationChunk": "chat_generation",
    "ChatResult": "chat_result",
    "Generation": "generation",
    "GenerationChunk": "generation",
    "LLMResult": "llm_result",
    "RunInfo": "run_info",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/prompts/__init__.py

@@ -25,35 +25,41 @@ from multiple components and prompt values. Prompt classes and functions make co
"""  # noqa: E501

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.prompts.base import (
        BasePromptTemplate,
        aformat_document,
        format_document,
    )
    from langchain_core.prompts.chat import (
        AIMessagePromptTemplate,
        BaseChatPromptTemplate,
        ChatMessagePromptTemplate,
        ChatPromptTemplate,
        HumanMessagePromptTemplate,
        MessagesPlaceholder,
        SystemMessagePromptTemplate,
    )
    from langchain_core.prompts.few_shot import (
        FewShotChatMessagePromptTemplate,
        FewShotPromptTemplate,
    )
    from langchain_core.prompts.few_shot_with_templates import (
        FewShotPromptWithTemplates,
    )
    from langchain_core.prompts.loading import load_prompt
    from langchain_core.prompts.pipeline import PipelinePromptTemplate
    from langchain_core.prompts.prompt import PromptTemplate
    from langchain_core.prompts.string import (
        StringPromptTemplate,
        check_valid_template,
        get_template_variables,
        jinja2_formatter,
        validate_jinja2,
    )

__all__ = [
    "AIMessagePromptTemplate",
@@ -78,3 +84,43 @@ __all__ = [
    "jinja2_formatter",
    "validate_jinja2",
]

_dynamic_imports = {
    "BasePromptTemplate": "base",
    "format_document": "base",
    "aformat_document": "base",
    "AIMessagePromptTemplate": "chat",
    "BaseChatPromptTemplate": "chat",
    "ChatMessagePromptTemplate": "chat",
    "ChatPromptTemplate": "chat",
    "HumanMessagePromptTemplate": "chat",
    "MessagesPlaceholder": "chat",
    "SystemMessagePromptTemplate": "chat",
    "FewShotChatMessagePromptTemplate": "few_shot",
    "FewShotPromptTemplate": "few_shot",
    "FewShotPromptWithTemplates": "few_shot_with_templates",
    "load_prompt": "loading",
    "PipelinePromptTemplate": "pipeline",
    "PromptTemplate": "prompt",
    "StringPromptTemplate": "string",
    "check_valid_template": "string",
    "get_template_variables": "string",
    "jinja2_formatter": "string",
    "validate_jinja2": "string",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/runnables/__init__.py

@@ -17,42 +17,46 @@ creating more responsive UX.
This module contains schema and implementation of LangChain Runnables primitives.
"""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.runnables.base import (
        Runnable,
        RunnableBinding,
        RunnableGenerator,
        RunnableLambda,
        RunnableMap,
        RunnableParallel,
        RunnableSequence,
        RunnableSerializable,
        chain,
    )
    from langchain_core.runnables.branch import RunnableBranch
    from langchain_core.runnables.config import (
        RunnableConfig,
        ensure_config,
        get_config_list,
        patch_config,
        run_in_executor,
    )
    from langchain_core.runnables.fallbacks import RunnableWithFallbacks
    from langchain_core.runnables.history import RunnableWithMessageHistory
    from langchain_core.runnables.passthrough import (
        RunnableAssign,
        RunnablePassthrough,
        RunnablePick,
    )
    from langchain_core.runnables.router import RouterInput, RouterRunnable
    from langchain_core.runnables.utils import (
        AddableDict,
        ConfigurableField,
        ConfigurableFieldMultiOption,
        ConfigurableFieldSingleOption,
        ConfigurableFieldSpec,
        aadd,
        add,
    )

__all__ = [
    "chain",
@@ -85,3 +89,51 @@ __all__ = [
    "aadd",
    "add",
]

_dynamic_imports = {
    "chain": "base",
    "Runnable": "base",
    "RunnableBinding": "base",
    "RunnableGenerator": "base",
    "RunnableLambda": "base",
    "RunnableMap": "base",
    "RunnableParallel": "base",
    "RunnableSequence": "base",
    "RunnableSerializable": "base",
    "RunnableBranch": "branch",
    "RunnableConfig": "config",
    "ensure_config": "config",
    "get_config_list": "config",
    "patch_config": "config",
    "run_in_executor": "config",
    "RunnableWithFallbacks": "fallbacks",
    "RunnableWithMessageHistory": "history",
    "RunnableAssign": "passthrough",
    "RunnablePassthrough": "passthrough",
    "RunnablePick": "passthrough",
    "RouterInput": "router",
    "RouterRunnable": "router",
    "AddableDict": "utils",
    "ConfigurableField": "utils",
    "ConfigurableFieldMultiOption": "utils",
    "ConfigurableFieldSingleOption": "utils",
    "ConfigurableFieldSpec": "utils",
    "aadd": "utils",
    "add": "utils",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/tools/__init__.py

@@ -19,33 +19,37 @@ tool for the job.
from __future__ import annotations

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.tools.base import (
        FILTERED_ARGS,
        ArgsSchema,
        BaseTool,
        BaseToolkit,
        InjectedToolArg,
        InjectedToolCallId,
        SchemaAnnotationError,
        ToolException,
        _get_runnable_config_param,
        create_schema_from_function,
    )
    from langchain_core.tools.convert import (
        convert_runnable_to_tool,
        tool,
    )
    from langchain_core.tools.render import (
        ToolsRenderer,
        render_text_description,
        render_text_description_and_args,
    )
    from langchain_core.tools.retriever import (
        RetrieverInput,
        create_retriever_tool,
    )
    from langchain_core.tools.simple import Tool
    from langchain_core.tools.structured import StructuredTool

__all__ = [
    "ArgsSchema",
@@ -68,3 +72,41 @@ __all__ = [
    "Tool",
    "StructuredTool",
]

_dynamic_imports = {
    "FILTERED_ARGS": "base",
    "ArgsSchema": "base",
    "BaseTool": "base",
    "BaseToolkit": "base",
    "InjectedToolArg": "base",
    "InjectedToolCallId": "base",
    "SchemaAnnotationError": "base",
    "ToolException": "base",
    "_get_runnable_config_param": "base",
    "create_schema_from_function": "base",
    "convert_runnable_to_tool": "convert",
    "tool": "convert",
    "ToolsRenderer": "render",
    "render_text_description": "render",
    "render_text_description_and_args": "render",
    "RetrieverInput": "retriever",
    "create_retriever_tool": "retriever",
    "Tool": "simple",
    "StructuredTool": "structured",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/tracers/__init__.py

@@ -8,6 +8,21 @@
--> <name>  # Examples: LogStreamCallbackHandler
"""  # noqa: E501

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.tracers.base import BaseTracer
    from langchain_core.tracers.evaluation import EvaluatorCallbackHandler
    from langchain_core.tracers.langchain import LangChainTracer
    from langchain_core.tracers.log_stream import (
        LogStreamCallbackHandler,
        RunLog,
        RunLogPatch,
    )
    from langchain_core.tracers.schemas import Run
    from langchain_core.tracers.stdout import ConsoleCallbackHandler

__all__ = [
    "BaseTracer",
    "EvaluatorCallbackHandler",
@@ -19,13 +34,29 @@ __all__ = [
    "LogStreamCallbackHandler",
]

_dynamic_imports = {
    "BaseTracer": "base",
    "EvaluatorCallbackHandler": "evaluation",
    "LangChainTracer": "langchain",
    "LogStreamCallbackHandler": "log_stream",
    "RunLog": "log_stream",
    "RunLogPatch": "log_stream",
    "Run": "schemas",
    "ConsoleCallbackHandler": "stdout",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)

langchain_core/utils/__init__.py

@@ -3,32 +3,38 @@
These functions do not depend on any other LangChain module.
"""

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # for type checking and IDE support, we include the imports here
    # but we don't want to eagerly import them at runtime
    from langchain_core.utils import image
    from langchain_core.utils.aiter import abatch_iterate
    from langchain_core.utils.env import get_from_dict_or_env, get_from_env
    from langchain_core.utils.formatting import StrictFormatter, formatter
    from langchain_core.utils.input import (
        get_bolded_text,
        get_color_mapping,
        get_colored_text,
        print_text,
    )
    from langchain_core.utils.iter import batch_iterate
    from langchain_core.utils.loading import try_load_from_hub
    from langchain_core.utils.pydantic import pre_init
    from langchain_core.utils.strings import comma_list, stringify_dict, stringify_value
    from langchain_core.utils.utils import (
        build_extra_kwargs,
        check_package_version,
        convert_to_secret_str,
        from_env,
        get_pydantic_field_names,
        guard_import,
        mock_now,
        raise_for_status_with_text,
        secret_from_env,
        xor_args,
    )

__all__ = [
    "build_extra_kwargs",
@@ -58,3 +64,48 @@ __all__ = [
    "from_env",
    "secret_from_env",
]

_dynamic_imports = {
    "image": "__module__",
    "abatch_iterate": "aiter",
    "get_from_dict_or_env": "env",
    "get_from_env": "env",
    "StrictFormatter": "formatting",
    "formatter": "formatting",
    "get_bolded_text": "input",
    "get_color_mapping": "input",
    "get_colored_text": "input",
    "print_text": "input",
    "batch_iterate": "iter",
    "try_load_from_hub": "loading",
    "pre_init": "pydantic",
    "comma_list": "strings",
    "stringify_dict": "strings",
    "stringify_value": "strings",
    "build_extra_kwargs": "utils",
    "check_package_version": "utils",
    "convert_to_secret_str": "utils",
    "from_env": "utils",
    "get_pydantic_field_names": "utils",
    "guard_import": "utils",
    "mock_now": "utils",
    "secret_from_env": "utils",
    "xor_args": "utils",
    "raise_for_status_with_text": "utils",
}


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    package = __spec__.parent  # type: ignore[name-defined]
    if module_name == "__module__" or module_name is None:
        result = import_module(f".{attr_name}", package=package)
    else:
        module = import_module(f".{module_name}", package=package)
        result = getattr(module, attr_name)
    globals()[attr_name] = result
    return result


def __dir__() -> list[str]:
    return list(__all__)
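
One detail worth calling out in the mapping above: `langchain_core.utils` maps `"image"` to the `"__module__"` sentinel, which tells `__getattr__` that the exported name is itself a submodule, so it gets imported directly rather than looked up as an attribute of another submodule:

```python
# Sketch of how the "__module__" sentinel resolves (mirrors __getattr__ above):
# "image" names a submodule, so it is imported directly on first access.
from langchain_core.utils import image

print(image.__name__)  # "langchain_core.utils.image"
```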