fix(core): refactor new types into top-level v1 namespace (#32403)

ccurme 2025-08-05 10:21:31 -03:00 committed by GitHub
parent deae8cc164
commit c36b123c8c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
54 changed files with 115 additions and 115 deletions
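
The change itself is mechanical: every import of the provisional v1 message and chat-model types moves from the old locations (`langchain_core.messages.v1`, `langchain_core.language_models.v1.chat_models`) to the new top-level namespace (`langchain_core.v1.messages`, `langchain_core.v1.chat_models`). A minimal sketch of the before/after import pattern that repeats throughout the diffs below; the helper function is hypothetical and only demonstrates the imported names in use.

# Old locations (removed by this commit):
# from langchain_core.messages.v1 import AIMessage as AIMessageV1
# from langchain_core.language_models.v1.chat_models import BaseChatModelV1

# New top-level v1 namespace (added by this commit):
from langchain_core.v1.chat_models import BaseChatModelV1
from langchain_core.v1.messages import AIMessage as AIMessageV1


def describe_v1_objects(model: BaseChatModelV1, message: AIMessageV1) -> str:
    # Hypothetical helper, not part of the diff: it only shows that the same
    # classes are exposed under the new import path.
    return f"{type(model).__name__} produced {type(message).__name__}"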

View File

@@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Any, Optional, Union
 from typing_extensions import Self
-from langchain_core.messages.v1 import AIMessage, AIMessageChunk, MessageV1
+from langchain_core.v1.messages import AIMessage, AIMessageChunk, MessageV1
 if TYPE_CHECKING:
 from collections.abc import Sequence

View File

@@ -38,15 +38,15 @@ from langchain_core.callbacks.base import (
 from langchain_core.callbacks.stdout import StdOutCallbackHandler
 from langchain_core.messages import BaseMessage, get_buffer_string
 from langchain_core.messages.utils import convert_from_v1_message
-from langchain_core.messages.v1 import (
+from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, LLMResult
+from langchain_core.tracers.schemas import Run
+from langchain_core.utils.env import env_var_is_set
+from langchain_core.v1.messages import (
 AIMessage,
 AIMessageChunk,
 MessageV1,
 MessageV1Types,
 )
-from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, LLMResult
-from langchain_core.tracers.schemas import Run
-from langchain_core.utils.env import env_var_is_set
 if TYPE_CHECKING:
 from collections.abc import AsyncGenerator, Coroutine, Generator, Sequence

View File

@@ -12,8 +12,8 @@ from langchain_core.callbacks.base import BaseCallbackHandler
 if TYPE_CHECKING:
 from langchain_core.agents import AgentAction, AgentFinish
 from langchain_core.messages import BaseMessage
-from langchain_core.messages.v1 import AIMessage, MessageV1
 from langchain_core.outputs import LLMResult
+from langchain_core.v1.messages import AIMessage, MessageV1
 class StreamingStdOutCallbackHandler(BaseCallbackHandler):

View File

@@ -12,8 +12,8 @@ from langchain_core.callbacks import BaseCallbackHandler
 from langchain_core.messages import AIMessage
 from langchain_core.messages.ai import UsageMetadata, add_usage
 from langchain_core.messages.utils import convert_from_v1_message
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
 from langchain_core.outputs import ChatGeneration, LLMResult
+from langchain_core.v1.messages import AIMessage as AIMessageV1
 class UsageMetadataCallbackHandler(BaseCallbackHandler):

View File

@@ -4,7 +4,7 @@ from collections.abc import Sequence
 from typing import Optional
 from langchain_core.messages import BaseMessage
-from langchain_core.messages.v1 import MessageV1
+from langchain_core.v1.messages import MessageV1
 def _is_openai_data_block(block: dict) -> bool:

View File

@@ -28,10 +28,10 @@ from langchain_core.messages import (
 MessageLikeRepresentation,
 get_buffer_string,
 )
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
 from langchain_core.prompt_values import PromptValue
 from langchain_core.runnables import Runnable, RunnableSerializable
 from langchain_core.utils import get_pydantic_field_names
+from langchain_core.v1.messages import AIMessage as AIMessageV1
 if TYPE_CHECKING:
 from langchain_core.outputs import LLMResult

View File

@@ -13,13 +13,13 @@ from langchain_core.callbacks import (
 CallbackManagerForLLMRun,
 )
 from langchain_core.language_models.chat_models import BaseChatModel, SimpleChatModel
-from langchain_core.language_models.v1.chat_models import BaseChatModelV1
 from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
-from langchain_core.messages.v1 import MessageV1
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.runnables import RunnableConfig
+from langchain_core.v1.chat_models import BaseChatModelV1
+from langchain_core.v1.messages import AIMessage as AIMessageV1
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
+from langchain_core.v1.messages import MessageV1
 class FakeMessagesListChatModel(BaseChatModel):

View File

@@ -1 +0,0 @@
-"""LangChain v1.0 chat models."""

View File

@@ -40,12 +40,12 @@ from langchain_core.messages.human import HumanMessage, HumanMessageChunk
 from langchain_core.messages.modifier import RemoveMessage
 from langchain_core.messages.system import SystemMessage, SystemMessageChunk
 from langchain_core.messages.tool import ToolCall, ToolMessage, ToolMessageChunk
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
-from langchain_core.messages.v1 import HumanMessage as HumanMessageV1
-from langchain_core.messages.v1 import MessageV1, MessageV1Types
-from langchain_core.messages.v1 import SystemMessage as SystemMessageV1
-from langchain_core.messages.v1 import ToolMessage as ToolMessageV1
+from langchain_core.v1.messages import AIMessage as AIMessageV1
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
+from langchain_core.v1.messages import HumanMessage as HumanMessageV1
+from langchain_core.v1.messages import MessageV1, MessageV1Types
+from langchain_core.v1.messages import SystemMessage as SystemMessageV1
+from langchain_core.v1.messages import ToolMessage as ToolMessageV1
 if TYPE_CHECKING:
 from langchain_text_splitters import TextSplitter

View File

@@ -18,10 +18,10 @@ from typing_extensions import override
 from langchain_core.language_models import LanguageModelOutput
 from langchain_core.messages import AnyMessage, BaseMessage
-from langchain_core.messages.v1 import AIMessage, MessageV1, MessageV1Types
 from langchain_core.outputs import ChatGeneration, Generation
 from langchain_core.runnables import Runnable, RunnableConfig, RunnableSerializable
 from langchain_core.runnables.config import run_in_executor
+from langchain_core.v1.messages import AIMessage, MessageV1, MessageV1Types
 if TYPE_CHECKING:
 from langchain_core.prompt_values import PromptValue

View File

@@ -13,7 +13,6 @@ from pydantic.v1 import BaseModel
 from typing_extensions import override
 from langchain_core.exceptions import OutputParserException
-from langchain_core.messages.v1 import AIMessage
 from langchain_core.output_parsers.format_instructions import JSON_FORMAT_INSTRUCTIONS
 from langchain_core.output_parsers.transform import BaseCumulativeTransformOutputParser
 from langchain_core.outputs import Generation
@@ -22,6 +21,7 @@ from langchain_core.utils.json import (
 parse_json_markdown,
 parse_partial_json,
 )
+from langchain_core.v1.messages import AIMessage
 # Union type needs to be last assignment to PydanticBaseModel to make mypy happy.
 PydanticBaseModel = Union[BaseModel, pydantic.BaseModel]

View File

@@ -12,8 +12,8 @@ from typing import TYPE_CHECKING, TypeVar, Union
 from typing_extensions import override
 from langchain_core.messages import BaseMessage
-from langchain_core.messages.v1 import AIMessage
 from langchain_core.output_parsers.transform import BaseTransformOutputParser
+from langchain_core.v1.messages import AIMessage
 if TYPE_CHECKING:
 from collections.abc import AsyncIterator, Iterator

View File

@@ -11,13 +11,13 @@ from pydantic.v1 import BaseModel as BaseModelV1
 from typing_extensions import override
 from langchain_core.exceptions import OutputParserException
-from langchain_core.messages.v1 import AIMessage
 from langchain_core.output_parsers import (
 BaseCumulativeTransformOutputParser,
 BaseGenerationOutputParser,
 )
 from langchain_core.output_parsers.json import parse_partial_json
 from langchain_core.outputs import ChatGeneration, Generation
+from langchain_core.v1.messages import AIMessage
 class OutputFunctionsParser(BaseGenerationOutputParser[Any]):

View File

@@ -12,11 +12,11 @@ from langchain_core.exceptions import OutputParserException
 from langchain_core.messages import AIMessage, InvalidToolCall
 from langchain_core.messages.tool import invalid_tool_call
 from langchain_core.messages.tool import tool_call as create_tool_call
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
 from langchain_core.output_parsers.transform import BaseCumulativeTransformOutputParser
 from langchain_core.outputs import ChatGeneration, Generation
 from langchain_core.utils.json import parse_partial_json
 from langchain_core.utils.pydantic import TypeBaseModel
+from langchain_core.v1.messages import AIMessage as AIMessageV1
 logger = logging.getLogger(__name__)

View File

@@ -8,13 +8,13 @@ from pydantic import SkipValidation
 from typing_extensions import override
 from langchain_core.exceptions import OutputParserException
-from langchain_core.messages.v1 import AIMessage
 from langchain_core.output_parsers import JsonOutputParser
 from langchain_core.outputs import Generation
 from langchain_core.utils.pydantic import (
 PydanticBaseModel,
 TBaseModel,
 )
+from langchain_core.v1.messages import AIMessage
 class PydanticOutputParser(JsonOutputParser, Generic[TBaseModel]):

View File

@@ -12,7 +12,6 @@ from typing import (
 from typing_extensions import override
 from langchain_core.messages import BaseMessage, BaseMessageChunk
-from langchain_core.messages.v1 import AIMessage, AIMessageChunk
 from langchain_core.output_parsers.base import BaseOutputParser, T
 from langchain_core.outputs import (
 ChatGeneration,
@@ -21,6 +20,7 @@ from langchain_core.outputs import (
 GenerationChunk,
 )
 from langchain_core.runnables.config import run_in_executor
+from langchain_core.v1.messages import AIMessage, AIMessageChunk
 if TYPE_CHECKING:
 from collections.abc import AsyncIterator, Iterator

View File

@@ -13,9 +13,9 @@ from typing_extensions import override
 from langchain_core.exceptions import OutputParserException
 from langchain_core.messages import BaseMessage
 from langchain_core.messages.utils import convert_from_v1_message
-from langchain_core.messages.v1 import AIMessage
 from langchain_core.output_parsers.transform import BaseTransformOutputParser
 from langchain_core.runnables.utils import AddableDict
+from langchain_core.v1.messages import AIMessage
 XML_FORMAT_INSTRUCTIONS = """The output should be formatted as a XML file.
 1. Output should conform to the tags below.

View File

@@ -23,11 +23,11 @@ from langchain_core.messages import (
 get_buffer_string,
 )
 from langchain_core.messages import content_blocks as types
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
-from langchain_core.messages.v1 import HumanMessage as HumanMessageV1
-from langchain_core.messages.v1 import MessageV1, ResponseMetadata
-from langchain_core.messages.v1 import SystemMessage as SystemMessageV1
-from langchain_core.messages.v1 import ToolMessage as ToolMessageV1
+from langchain_core.v1.messages import AIMessage as AIMessageV1
+from langchain_core.v1.messages import HumanMessage as HumanMessageV1
+from langchain_core.v1.messages import MessageV1, ResponseMetadata
+from langchain_core.v1.messages import SystemMessage as SystemMessageV1
+from langchain_core.v1.messages import ToolMessage as ToolMessageV1
 def _convert_to_v1(message: BaseMessage) -> MessageV1:

View File

@@ -47,7 +47,6 @@ from langchain_core.callbacks import (
 Callbacks,
 )
 from langchain_core.messages.tool import ToolCall, ToolMessage, ToolOutputMixin
-from langchain_core.messages.v1 import ToolMessage as ToolMessageV1
 from langchain_core.runnables import (
 RunnableConfig,
 RunnableSerializable,
@@ -69,6 +68,7 @@ from langchain_core.utils.pydantic import (
 is_pydantic_v1_subclass,
 is_pydantic_v2_subclass,
 )
+from langchain_core.v1.messages import ToolMessage as ToolMessageV1
 if TYPE_CHECKING:
 import uuid

View File

@@ -16,8 +16,8 @@ from typing_extensions import override
 from langchain_core.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
 from langchain_core.exceptions import TracerException # noqa: F401
-from langchain_core.messages.v1 import AIMessage, AIMessageChunk, MessageV1
 from langchain_core.tracers.core import _TracerCore
+from langchain_core.v1.messages import AIMessage, AIMessageChunk, MessageV1
 if TYPE_CHECKING:
 from collections.abc import Sequence

View File

@@ -19,12 +19,6 @@ from typing import (
 from langchain_core.exceptions import TracerException
 from langchain_core.load import dumpd
 from langchain_core.messages.utils import convert_from_v1_message
-from langchain_core.messages.v1 import (
-AIMessage,
-AIMessageChunk,
-MessageV1,
-MessageV1Types,
-)
 from langchain_core.outputs import (
 ChatGeneration,
 ChatGenerationChunk,
@@ -32,6 +26,12 @@ from langchain_core.outputs import (
 LLMResult,
 )
 from langchain_core.tracers.schemas import Run
+from langchain_core.v1.messages import (
+AIMessage,
+AIMessageChunk,
+MessageV1,
+MessageV1Types,
+)
 if TYPE_CHECKING:
 from collections.abc import Coroutine, Sequence

View File

@@ -19,7 +19,6 @@ from typing_extensions import NotRequired, TypedDict, override
 from langchain_core.callbacks.base import AsyncCallbackHandler
 from langchain_core.messages import AIMessageChunk, BaseMessage, BaseMessageChunk
-from langchain_core.messages.v1 import MessageV1
 from langchain_core.outputs import (
 ChatGenerationChunk,
 GenerationChunk,
@@ -39,15 +38,16 @@ from langchain_core.runnables.utils import (
 from langchain_core.tracers._streaming import _StreamingCallbackHandler
 from langchain_core.tracers.memory_stream import _MemoryStream
 from langchain_core.utils.aiter import aclosing, py_anext
+from langchain_core.v1.messages import MessageV1
 if TYPE_CHECKING:
 from collections.abc import AsyncIterator, Iterator, Sequence
 from langchain_core.documents import Document
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
 from langchain_core.runnables import Runnable, RunnableConfig
 from langchain_core.tracers.log_stream import LogEntry
+from langchain_core.v1.messages import AIMessage as AIMessageV1
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
 logger = logging.getLogger(__name__)

View File

@@ -22,14 +22,14 @@ from typing_extensions import override
 from langchain_core.env import get_runtime_environment
 from langchain_core.load import dumpd
 from langchain_core.messages.utils import convert_from_v1_message
-from langchain_core.messages.v1 import MessageV1Types
 from langchain_core.tracers.base import BaseTracer
 from langchain_core.tracers.schemas import Run
+from langchain_core.v1.messages import MessageV1Types
 if TYPE_CHECKING:
 from langchain_core.messages import BaseMessage
-from langchain_core.messages.v1 import AIMessageChunk, MessageV1
 from langchain_core.outputs import ChatGenerationChunk, GenerationChunk
+from langchain_core.v1.messages import AIMessageChunk, MessageV1
 logger = logging.getLogger(__name__)
 _LOGGED = set()

View File

@@ -32,9 +32,9 @@ if TYPE_CHECKING:
 from collections.abc import AsyncIterator, Iterator, Sequence
 from uuid import UUID
-from langchain_core.messages.v1 import AIMessageChunk
 from langchain_core.runnables.utils import Input, Output
 from langchain_core.tracers.schemas import Run
+from langchain_core.v1.messages import AIMessageChunk
 class LogEntry(TypedDict):

View File

@@ -0,0 +1 @@
+"""LangChain v1.0 types."""

View File

@@ -52,10 +52,6 @@ from langchain_core.messages.utils import (
 convert_from_v1_message,
 convert_to_messages_v1,
 )
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
-from langchain_core.messages.v1 import HumanMessage as HumanMessageV1
-from langchain_core.messages.v1 import MessageV1, add_ai_message_chunks
 from langchain_core.outputs import (
 ChatGeneration,
 ChatGenerationChunk,
@@ -71,6 +67,10 @@ from langchain_core.utils.function_calling import (
 convert_to_openai_tool,
 )
 from langchain_core.utils.pydantic import TypeBaseModel, is_basemodel_subclass
+from langchain_core.v1.messages import AIMessage as AIMessageV1
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
+from langchain_core.v1.messages import HumanMessage as HumanMessageV1
+from langchain_core.v1.messages import MessageV1, add_ai_message_chunks
 if TYPE_CHECKING:
 from langchain_core.output_parsers.base import OutputParserLike

View File

@@ -10,9 +10,9 @@ from typing_extensions import override
 from langchain_core.callbacks.base import AsyncCallbackHandler
 from langchain_core.language_models import GenericFakeChatModel
 from langchain_core.messages import AIMessage, BaseMessage
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
-from langchain_core.messages.v1 import MessageV1
 from langchain_core.outputs import ChatGenerationChunk, GenerationChunk
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
+from langchain_core.v1.messages import MessageV1
 class MyCustomAsyncHandler(AsyncCallbackHandler):

View File

@@ -9,7 +9,7 @@ from typing_extensions import override
 from langchain_core.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
 from langchain_core.messages import BaseMessage
-from langchain_core.messages.v1 import MessageV1
+from langchain_core.v1.messages import MessageV1
 class BaseFakeCallbackHandler(BaseModel):

View File

@@ -15,9 +15,9 @@ from langchain_core.language_models import (
 ParrotFakeChatModel,
 )
 from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage, HumanMessage
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
-from langchain_core.messages.v1 import MessageV1
 from langchain_core.outputs import ChatGenerationChunk, GenerationChunk
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
+from langchain_core.v1.messages import MessageV1
 from tests.unit_tests.stubs import (
 _any_id_ai_message,
 _any_id_ai_message_chunk,

View File

@@ -25,7 +25,6 @@ from langchain_core.messages import (
 HumanMessage,
 SystemMessage,
 )
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.outputs.llm_result import LLMResult
 from langchain_core.tracers import LogStreamCallbackHandler
@@ -33,6 +32,7 @@ from langchain_core.tracers.base import BaseTracer
 from langchain_core.tracers.context import collect_runs
 from langchain_core.tracers.event_stream import _AstreamEventsCallbackHandler
 from langchain_core.tracers.schemas import Run
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
 from tests.unit_tests.fake.callbacks import (
 BaseFakeCallbackHandler,
 FakeAsyncCallbackHandler,

View File

@@ -1,6 +1,6 @@
 """Unit tests for ResponseMetadata TypedDict."""
-from langchain_core.messages.v1 import AIMessage, AIMessageChunk, ResponseMetadata
+from langchain_core.v1.messages import AIMessage, AIMessageChunk, ResponseMetadata
 class TestResponseMetadata:

View File

@@ -8,12 +8,12 @@ from langchain_core.exceptions import OutputParserException
 from langchain_core.language_models import GenericFakeChatModel
 from langchain_core.language_models.fake_chat_models import GenericFakeChatModelV1
 from langchain_core.messages import AIMessage
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
 from langchain_core.output_parsers import (
 BaseGenerationOutputParser,
 BaseTransformOutputParser,
 )
 from langchain_core.outputs import ChatGeneration, Generation
+from langchain_core.v1.messages import AIMessage as AIMessageV1
 def test_base_generation_parser() -> None:

View File

@@ -10,14 +10,14 @@ from langchain_core.messages import (
 BaseMessage,
 ToolCallChunk,
 )
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
 from langchain_core.output_parsers.openai_tools import (
 JsonOutputKeyToolsParser,
 JsonOutputToolsParser,
 PydanticToolsParser,
 )
 from langchain_core.outputs import ChatGeneration
+from langchain_core.v1.messages import AIMessage as AIMessageV1
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
 STREAMED_MESSAGES: list = [
 AIMessageChunk(content=""),

View File

@@ -2529,7 +2529,7 @@
 'title': 'ToolMessage',
 'type': 'object',
 }),
-'langchain_core__messages__v1__AIMessage': dict({
+'langchain_core__v1__messages__AIMessage': dict({
 'properties': dict({
 'content': dict({
 'items': dict({
@@ -2655,7 +2655,7 @@
 'title': 'AIMessage',
 'type': 'object',
 }),
-'langchain_core__messages__v1__AIMessageChunk': dict({
+'langchain_core__v1__messages__AIMessageChunk': dict({
 'properties': dict({
 'content': dict({
 'items': dict({
@@ -2781,7 +2781,7 @@
 'title': 'AIMessageChunk',
 'type': 'object',
 }),
-'langchain_core__messages__v1__HumanMessage': dict({
+'langchain_core__v1__messages__HumanMessage': dict({
 'properties': dict({
 'content': dict({
 'items': dict({
@@ -2869,7 +2869,7 @@
 'title': 'HumanMessage',
 'type': 'object',
 }),
-'langchain_core__messages__v1__SystemMessage': dict({
+'langchain_core__v1__messages__SystemMessage': dict({
 'properties': dict({
 'content': dict({
 'items': dict({
@@ -2969,7 +2969,7 @@
 'title': 'SystemMessage',
 'type': 'object',
 }),
-'langchain_core__messages__v1__ToolMessage': dict({
+'langchain_core__v1__messages__ToolMessage': dict({
 'properties': dict({
 'artifact': dict({
 'anyOf': list([
@@ -3123,19 +3123,19 @@
 ]),
 }),
 dict({
-'$ref': '#/$defs/langchain_core__messages__v1__AIMessage',
+'$ref': '#/$defs/langchain_core__v1__messages__AIMessage',
 }),
 dict({
-'$ref': '#/$defs/langchain_core__messages__v1__AIMessageChunk',
+'$ref': '#/$defs/langchain_core__v1__messages__AIMessageChunk',
 }),
 dict({
-'$ref': '#/$defs/langchain_core__messages__v1__HumanMessage',
+'$ref': '#/$defs/langchain_core__v1__messages__HumanMessage',
 }),
 dict({
-'$ref': '#/$defs/langchain_core__messages__v1__SystemMessage',
+'$ref': '#/$defs/langchain_core__v1__messages__SystemMessage',
 }),
 dict({
-'$ref': '#/$defs/langchain_core__messages__v1__ToolMessage',
+'$ref': '#/$defs/langchain_core__v1__messages__ToolMessage',
 }),
 ]),
 'title': 'RunnableParallel<as_list,as_str>Input',

View File

@@ -9412,19 +9412,19 @@
 ]),
 }),
 dict({
-'$ref': '#/definitions/langchain_core__messages__v1__AIMessage',
+'$ref': '#/definitions/langchain_core__v1__messages__AIMessage',
 }),
 dict({
-'$ref': '#/definitions/langchain_core__messages__v1__AIMessageChunk',
+'$ref': '#/definitions/langchain_core__v1__messages__AIMessageChunk',
 }),
 dict({
-'$ref': '#/definitions/langchain_core__messages__v1__HumanMessage',
+'$ref': '#/definitions/langchain_core__v1__messages__HumanMessage',
 }),
 dict({
-'$ref': '#/definitions/langchain_core__messages__v1__SystemMessage',
+'$ref': '#/definitions/langchain_core__v1__messages__SystemMessage',
 }),
 dict({
-'$ref': '#/definitions/langchain_core__messages__v1__ToolMessage',
+'$ref': '#/definitions/langchain_core__v1__messages__ToolMessage',
 }),
 ]),
 'definitions': dict({
@@ -11521,7 +11521,7 @@
 'title': 'ToolMessage',
 'type': 'object',
 }),
-'langchain_core__messages__v1__AIMessage': dict({
+'langchain_core__v1__messages__AIMessage': dict({
 'properties': dict({
 'content': dict({
 'items': dict({
@@ -11646,7 +11646,7 @@
 'title': 'AIMessage',
 'type': 'object',
 }),
-'langchain_core__messages__v1__AIMessageChunk': dict({
+'langchain_core__v1__messages__AIMessageChunk': dict({
 'properties': dict({
 'content': dict({
 'items': dict({
@@ -11771,7 +11771,7 @@
 'title': 'AIMessageChunk',
 'type': 'object',
 }),
-'langchain_core__messages__v1__HumanMessage': dict({
+'langchain_core__v1__messages__HumanMessage': dict({
 'properties': dict({
 'content': dict({
 'items': dict({
@@ -11858,7 +11858,7 @@
 'title': 'HumanMessage',
 'type': 'object',
 }),
-'langchain_core__messages__v1__SystemMessage': dict({
+'langchain_core__v1__messages__SystemMessage': dict({
 'properties': dict({
 'content': dict({
 'items': dict({
@@ -11957,7 +11957,7 @@
 'title': 'SystemMessage',
 'type': 'object',
 }),
-'langchain_core__messages__v1__ToolMessage': dict({
+'langchain_core__v1__messages__ToolMessage': dict({
 'properties': dict({
 'artifact': dict({
 'anyOf': list([

View File

@@ -35,9 +35,9 @@ from langchain_core.messages.content_blocks import KNOWN_BLOCK_TYPES, ContentBlo
 from langchain_core.messages.tool import invalid_tool_call as create_invalid_tool_call
 from langchain_core.messages.tool import tool_call as create_tool_call
 from langchain_core.messages.tool import tool_call_chunk as create_tool_call_chunk
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
 from langchain_core.utils._merge import merge_lists
+from langchain_core.v1.messages import AIMessage as AIMessageV1
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
 def test_message_init() -> None:

View File

@@ -37,7 +37,6 @@ from langchain_core.callbacks.manager import (
 from langchain_core.documents import Document
 from langchain_core.messages import ToolCall, ToolMessage
 from langchain_core.messages.tool import ToolOutputMixin
-from langchain_core.messages.v1 import ToolMessage as ToolMessageV1
 from langchain_core.retrievers import BaseRetriever
 from langchain_core.runnables import (
 Runnable,
@@ -69,6 +68,7 @@ from langchain_core.utils.pydantic import (
 _create_subset_model,
 create_model_v2,
 )
+from langchain_core.v1.messages import ToolMessage as ToolMessageV1
 from tests.unit_tests.fake.callbacks import FakeCallbackHandler
 from tests.unit_tests.pydantic_utils import _schema
 from tests.unit_tests.stubs import AnyStr

View File

@@ -12,11 +12,11 @@ from freezegun import freeze_time
 from langchain_core.callbacks import AsyncCallbackManager
 from langchain_core.exceptions import TracerException
 from langchain_core.messages import HumanMessage
-from langchain_core.messages.v1 import HumanMessage as HumanMessageV1
-from langchain_core.messages.v1 import MessageV1
 from langchain_core.outputs import LLMResult
 from langchain_core.tracers.base import AsyncBaseTracer
 from langchain_core.tracers.schemas import Run
+from langchain_core.v1.messages import HumanMessage as HumanMessageV1
+from langchain_core.v1.messages import MessageV1
 if TYPE_CHECKING:
 from langchain_core.messages import BaseMessage

View File

@@ -15,12 +15,12 @@ from langsmith import Client, traceable
 from langchain_core.callbacks import CallbackManager
 from langchain_core.exceptions import TracerException
 from langchain_core.messages import HumanMessage
-from langchain_core.messages.v1 import HumanMessage as HumanMessageV1
-from langchain_core.messages.v1 import MessageV1
 from langchain_core.outputs import LLMResult
 from langchain_core.runnables import chain as as_runnable
 from langchain_core.tracers.base import BaseTracer
 from langchain_core.tracers.schemas import Run
+from langchain_core.v1.messages import HumanMessage as HumanMessageV1
+from langchain_core.v1.messages import MessageV1
 if TYPE_CHECKING:
 from langchain_core.messages import BaseMessage

View File

@@ -9,8 +9,8 @@ from langchain_core.messages import (
 BaseMessage,
 )
 from langchain_core.messages.utils import convert_from_v1_message
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
 from langchain_core.outputs import ChatGeneration, Generation
+from langchain_core.v1.messages import AIMessage as AIMessageV1
 from typing_extensions import override
 from langchain.agents.agent import AgentOutputParser

View File

@@ -3,8 +3,8 @@ from typing import Union
 from langchain_core.agents import AgentAction, AgentFinish
 from langchain_core.messages import BaseMessage
 from langchain_core.messages.utils import convert_from_v1_message
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
 from langchain_core.outputs import ChatGeneration, Generation
+from langchain_core.v1.messages import AIMessage as AIMessageV1
 from typing_extensions import override
 from langchain.agents.agent import MultiActionAgentOutputParser

View File

@@ -10,8 +10,8 @@ from langchain_core.messages import (
 ToolCall,
 )
 from langchain_core.messages.utils import convert_from_v1_message
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
 from langchain_core.outputs import ChatGeneration, Generation
+from langchain_core.v1.messages import AIMessage as AIMessageV1
 from typing_extensions import override
 from langchain.agents.agent import MultiActionAgentOutputParser

View File

@@ -5,8 +5,8 @@ from collections.abc import AsyncIterator
 from typing import Any, Literal, Union, cast
 from langchain_core.callbacks import AsyncCallbackHandler
-from langchain_core.messages.v1 import AIMessage
 from langchain_core.outputs import LLMResult
+from langchain_core.v1.messages import AIMessage
 from typing_extensions import override
 # TODO If used by two LLM runs in parallel this won't work as expected

View File

@@ -2,8 +2,8 @@ from __future__ import annotations
 from typing import Any, Optional, Union
-from langchain_core.messages.v1 import AIMessage
 from langchain_core.outputs import LLMResult
+from langchain_core.v1.messages import AIMessage
 from typing_extensions import override
 from langchain.callbacks.streaming_aiter import AsyncIteratorCallbackHandler

View File

@@ -7,8 +7,8 @@ from uuid import UUID
 from langchain_core.callbacks import base as base_callbacks
 from langchain_core.documents import Document
-from langchain_core.messages.v1 import AIMessage
 from langchain_core.outputs import LLMResult
+from langchain_core.v1.messages import AIMessage
 from typing_extensions import override

View File

@@ -6,7 +6,7 @@ from uuid import UUID
 from langchain_core.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
 from langchain_core.messages import BaseMessage
-from langchain_core.messages.v1 import MessageV1
+from langchain_core.v1.messages import MessageV1
 from pydantic import BaseModel
 from typing_extensions import override

View File

@@ -6,9 +6,9 @@ from uuid import UUID
 from langchain_core.callbacks.base import AsyncCallbackHandler
 from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
-from langchain_core.messages.v1 import MessageV1
 from langchain_core.outputs import ChatGenerationChunk, GenerationChunk
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
+from langchain_core.v1.messages import MessageV1
 from typing_extensions import override
 from tests.unit_tests.llms.fake_chat_model import GenericFakeChatModel

View File

@@ -73,7 +73,7 @@ from typing import Any, Literal, Optional, Union, cast
 from langchain_core.messages import AIMessage, is_data_content_block
 from langchain_core.messages import content_blocks as types
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
+from langchain_core.v1.messages import AIMessage as AIMessageV1
 _FUNCTION_CALL_IDS_MAP_KEY = "__openai_function_call_ids__"

View File

@@ -38,11 +38,6 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models import LanguageModelInput
 from langchain_core.language_models.chat_models import LangSmithParams
-from langchain_core.language_models.v1.chat_models import (
-BaseChatModelV1,
-agenerate_from_stream,
-generate_from_stream,
-)
 from langchain_core.messages import (
 InvalidToolCall,
 ToolCall,
@@ -55,12 +50,6 @@ from langchain_core.messages.ai import (
 UsageMetadata,
 )
 from langchain_core.messages.tool import tool_call_chunk
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
-from langchain_core.messages.v1 import HumanMessage as HumanMessageV1
-from langchain_core.messages.v1 import MessageV1, ResponseMetadata
-from langchain_core.messages.v1 import SystemMessage as SystemMessageV1
-from langchain_core.messages.v1 import ToolMessage as ToolMessageV1
 from langchain_core.output_parsers import JsonOutputParser, PydanticOutputParser
 from langchain_core.output_parsers.openai_tools import (
 JsonOutputKeyToolsParser,
@@ -88,6 +77,17 @@ from langchain_core.utils.pydantic import (
 is_basemodel_subclass,
 )
 from langchain_core.utils.utils import _build_model_kwargs, from_env, secret_from_env
+from langchain_core.v1.chat_models import (
+BaseChatModelV1,
+agenerate_from_stream,
+generate_from_stream,
+)
+from langchain_core.v1.messages import AIMessage as AIMessageV1
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
+from langchain_core.v1.messages import HumanMessage as HumanMessageV1
+from langchain_core.v1.messages import MessageV1, ResponseMetadata
+from langchain_core.v1.messages import SystemMessage as SystemMessageV1
+from langchain_core.v1.messages import ToolMessage as ToolMessageV1
 from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator
 from pydantic.v1 import BaseModel as BaseModelV1
 from typing_extensions import Self

View File

@@ -14,9 +14,9 @@ from langchain_core.messages import (
 HumanMessage,
 MessageLikeRepresentation,
 )
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
-from langchain_core.messages.v1 import HumanMessage as HumanMessageV1
+from langchain_core.v1.messages import AIMessage as AIMessageV1
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
+from langchain_core.v1.messages import HumanMessage as HumanMessageV1
 from pydantic import BaseModel
 from typing_extensions import TypedDict

View File

@@ -22,11 +22,11 @@ from langchain_core.messages import (
 )
 from langchain_core.messages import content_blocks as types
 from langchain_core.messages.ai import UsageMetadata
-from langchain_core.messages.v1 import AIMessage as AIMessageV1
 from langchain_core.outputs import ChatGeneration, ChatResult
 from langchain_core.runnables import RunnableLambda
 from langchain_core.tracers.base import BaseTracer
 from langchain_core.tracers.schemas import Run
+from langchain_core.v1.messages import AIMessage as AIMessageV1
 from openai.types.responses import ResponseOutputMessage, ResponseReasoningItem
 from openai.types.responses.response import IncompleteDetails, Response, ResponseUsage
 from openai.types.responses.response_error import ResponseError

View File

@@ -2,7 +2,7 @@ from typing import Any, Optional
 from unittest.mock import MagicMock, patch
 from langchain_core.messages import AIMessageChunk, BaseMessageChunk
-from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
+from langchain_core.v1.messages import AIMessageChunk as AIMessageChunkV1
 from openai.types.responses import (
 ResponseCompletedEvent,
 ResponseContentPartAddedEvent,

View File

@@ -6,7 +6,7 @@ from uuid import UUID
 from langchain_core.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
 from langchain_core.messages import BaseMessage
-from langchain_core.messages.v1 import MessageV1
+from langchain_core.v1.messages import MessageV1
 from pydantic import BaseModel