wip transform

Bagatur 2024-08-29 11:38:18 -07:00
parent ad009f9c69
commit fc46822bb7
3 changed files with 95 additions and 94 deletions

View File

@@ -28,7 +28,7 @@ from typing import (
Type,
Union,
cast,
overload,
overload, Iterator,
)
from langchain_core.messages.ai import AIMessage, AIMessageChunk
@@ -41,6 +41,7 @@ from langchain_core.messages.system import SystemMessage, SystemMessageChunk
from langchain_core.messages.tool import ToolMessage, ToolMessageChunk
from langchain_core.messages.tool import (
tool_call as create_tool_call,
tool_call_chunk as create_tool_call_chunk
)
if TYPE_CHECKING:
@@ -334,10 +335,7 @@ def convert_to_messages(
return [_convert_to_message(m, copy=copy) for m in messages]
def _runnable_support(*args: Callable, supports_single: bool = False) -> Callable:
if supports_single:
def runnable_support(func: Callable) -> Callable:
def _runnable_generator(func: Callable) -> Callable:
@overload
def wrapped(
messages: Literal[None] = None, **kwargs: Any
@@ -352,9 +350,7 @@ def _runnable_support(*args: Callable, supports_single: bool = False) -> Callabl
) -> List[BaseMessage]: ...
@overload
def wrapped(
messages: MessageLikeRepresentation, **kwargs: Any
) -> BaseMessage: ...
def wrapped(messages: MessageLikeRepresentation, **kwargs: Any) -> BaseMessage: ...
def wrapped(
messages: Union[
@@ -365,25 +361,25 @@ def _runnable_support(*args: Callable, supports_single: bool = False) -> Callabl
BaseMessage,
List[BaseMessage],
Runnable[
Union[
MessageLikeRepresentation, Sequence[MessageLikeRepresentation]
],
Union[MessageLikeRepresentation, Sequence[MessageLikeRepresentation]],
Union[BaseMessage, List[BaseMessage]],
],
]:
from langchain_core.runnables.base import RunnableLambda
from langchain_core.runnables.base import RunnableGenerator
if messages is not None:
return func(messages, **kwargs)
else:
return RunnableLambda(partial(func, **kwargs), name=func.__name__)
def transform(input_: Iterator, **kwargs: Any) -> Iterator:
for x in input_:
yield func(x, **kwargs)
return RunnableGenerator(partial(transform, **kwargs), name=func.__name__)
wrapped.__doc__ = func.__doc__
return wrapped
else:
def runnable_support(func: Callable) -> Callable:
def _runnable_support(func: Callable) -> Callable:
@overload
def wrapped(
messages: Literal[None] = None, **kwargs: Any
@@ -411,8 +407,6 @@ def _runnable_support(*args: Callable, supports_single: bool = False) -> Callabl
wrapped.__doc__ = func.__doc__
return wrapped
return runnable_support(*args) if args else cast(Callable, runnable_support)
@_runnable_support
def filter_messages(
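
Note: the hunks above swap the old RunnableLambda-returning decorator for a generator-based one, so calling a decorated function without messages yields a streaming transform instead of an eager lambda. A minimal standalone sketch of that pattern (overloads omitted, names simplified; it assumes the name= parameter that this same commit adds to RunnableGenerator):

from functools import partial
from typing import Any, Callable, Iterator, Optional

from langchain_core.runnables import RunnableGenerator


def runnable_generator(func: Callable) -> Callable:
    """Simplified sketch of the _runnable_generator decorator above."""

    def wrapped(messages: Optional[Any] = None, **kwargs: Any) -> Any:
        if messages is not None:
            # Eager path: behaves like a plain function call.
            return func(messages, **kwargs)

        # Lazy path: return a streaming Runnable that applies `func` to
        # every item pulled from the input iterator.
        def transform(input_: Iterator, **kwargs: Any) -> Iterator:
            for x in input_:
                yield func(x, **kwargs)

        # partial() has no __name__, hence the explicit name= (the keyword
        # this same commit adds to RunnableGenerator).
        return RunnableGenerator(partial(transform, **kwargs), name=func.__name__)

    wrapped.__doc__ = func.__doc__
    return wrapped
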
@@ -909,13 +903,13 @@ def trim_messages(
)
@_runnable_support(supports_single=True)
@_runnable_generator
def format_messages_as(
messages: Union[MessageLikeRepresentation, Iterable[MessageLikeRepresentation]],
messages: Union[MessageLikeRepresentation, Iterator[MessageLikeRepresentation]],
*,
format: Literal["openai", "anthropic"],
text: Literal["string", "block"],
) -> Union[BaseMessage, List[BaseMessage]]:
) -> Union[BaseMessage, Iterator[BaseMessage]]:
"""Convert message contents into a standard format.
.. versionadded:: 0.2.36
@@ -1123,7 +1117,6 @@ def _format_contents_as_openai(
)
# Bedrock converse
elif image := block.get("image"):
raise ValueError("1064")
if missing := [
k for k in ("source", "format") if k not in image
]:
@@ -1153,6 +1146,7 @@ def _format_contents_as_openai(
f"content block:\n\n{block}"
)
elif block.get("type") == "tool_use":
if not isinstance(message, BaseMessageChunk):
if missing := [
k for k in ("id", "name", "input") if k not in block
]:
@@ -1173,6 +1167,9 @@ def _format_contents_as_openai(
args=block["input"],
)
)
else:
if not message.tool_call_chunks:
message.tool_call_chunks = [create_tool_call_chunk(id=block.get("id"), index=block.get("index"), args=block.get("partial_json"), name=block.get("name"))]
elif block.get("type") == "tool_result":
if missing := [
k for k in ("content", "tool_use_id") if k not in block

View File

@@ -3882,6 +3882,7 @@ class RunnableGenerator(Runnable[Input, Output]):
atransform: Optional[
Callable[[AsyncIterator[Input]], AsyncIterator[Output]]
] = None,
name: Optional[str] = None,
) -> None:
"""Initialize a RunnableGenerator.
@@ -3909,6 +3910,9 @@ class RunnableGenerator(Runnable[Input, Output]):
)
try:
if name:
self.name = name
else:
self.name = func_for_name.__name__
except AttributeError:
pass
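
Note: the new name parameter above exists mainly because functools.partial objects have no __name__, so the try/except fallback would otherwise leave such a runnable unnamed. A small sketch of the constructor call this enables (only valid with this change applied):

from functools import partial
from typing import Iterator

from langchain_core.runnables import RunnableGenerator


def shout(chunks: Iterator[str], suffix: str = "") -> Iterator[str]:
    for chunk in chunks:
        yield chunk.upper() + suffix


# partial(...) has no __name__, so pass an explicit name (parameter added here).
gen = RunnableGenerator(partial(shout, suffix="!"), name="shout")
print(list(gen.transform(iter(["a", "b"]))))  # ['A!', 'B!']
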

View File

@@ -1210,12 +1210,12 @@ def _make_message_chunk_from_anthropic_event(
content_block = event.delta.model_dump()
content_block["index"] = event.index
content_block["type"] = "tool_use"
tool_call_chunk = {
tool_call_chunk = create_tool_call_chunk(**{
"index": event.index,
"id": None,
"name": None,
"args": event.delta.partial_json,
}
})
message_chunk = AIMessageChunk(
content=[content_block],
tool_call_chunks=[tool_call_chunk], # type: ignore
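
Note: a short sketch of the change above. For Anthropic input_json_delta events the chunk now carries a typed ToolCallChunk built by the factory (imported as create_tool_call_chunk) instead of a bare dict, which also stamps the "type": "tool_call_chunk" key that downstream chunk merging relies on. The example values are illustrative only.

from langchain_core.messages import AIMessageChunk
from langchain_core.messages.tool import tool_call_chunk as create_tool_call_chunk

# Illustrative partial_json as it might arrive mid-stream from Anthropic.
partial_json = '{"query": "wea'

chunk = AIMessageChunk(
    content=[{"type": "tool_use", "index": 0, "partial_json": partial_json}],
    tool_call_chunks=[
        create_tool_call_chunk(index=0, id=None, name=None, args=partial_json)
    ],
)
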