Mirror of https://github.com/hwchase17/langchain.git (synced 2025-08-23 03:22:38 +00:00)
fix(core): add type key when tracing in some cases (#31825)
parent af3789b9ed
commit 8acfd677bc
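Some chat-model inputs contain content blocks that carry no "type" key (for example, a Bedrock-style {"cachePoint": {"type": "default"}} block). Tracing in on_chat_model_start assumes every block has a "type", so this commit makes _format_for_tracing add one when there is an obvious choice: a dict block with exactly one key and no "type". A minimal sketch of the intended effect, mirroring the single-key rule in the diff below (the helper name _add_type_key is illustrative, not part of langchain):

    # Illustrative helper mirroring the normalization added in this commit:
    # a single-key dict block without a "type" key gets one, using the lone
    # key as the type; blocks that already have a "type" pass through.
    def _add_type_key(block: dict) -> dict:
        if len(block) == 1 and "type" not in block:
            key = next(iter(block))
            return {"type": key, key: block[key]}
        return block

    assert _add_type_key({"cachePoint": {"type": "default"}}) == {
        "type": "cachePoint",
        "cachePoint": {"type": "default"},
    }
    assert _add_type_key({"type": "text", "text": "Hello"}) == {
        "type": "text",
        "text": "Hello",
    }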
@@ -111,8 +111,9 @@ def _generate_response_from_error(error: BaseException) -> list[ChatGeneration]:
 def _format_for_tracing(messages: list[BaseMessage]) -> list[BaseMessage]:
     """Format messages for tracing in on_chat_model_start.
 
-    For backward compatibility, we update image content blocks to OpenAI Chat
-    Completions format.
+    - Update image content blocks to OpenAI Chat Completions format (backward
+      compatibility).
+    - Add "type" key to content blocks that have a single key.
 
     Args:
         messages: List of messages to format.
@@ -125,20 +126,36 @@ def _format_for_tracing(messages: list[BaseMessage]) -> list[BaseMessage]:
         message_to_trace = message
         if isinstance(message.content, list):
             for idx, block in enumerate(message.content):
-                if (
-                    isinstance(block, dict)
-                    and block.get("type") == "image"
-                    and is_data_content_block(block)
-                    and block.get("source_type") != "id"
-                ):
-                    if message_to_trace is message:
-                        message_to_trace = message.model_copy()
-                        # Also shallow-copy content
-                        message_to_trace.content = list(message_to_trace.content)
-
-                    message_to_trace.content[idx] = (  # type: ignore[index]  # mypy confused by .model_copy
-                        convert_to_openai_image_block(block)
-                    )
+                if isinstance(block, dict):
+                    # Update image content blocks to OpenAI Chat Completions format.
+                    if (
+                        block.get("type") == "image"
+                        and is_data_content_block(block)
+                        and block.get("source_type") != "id"
+                    ):
+                        if message_to_trace is message:
+                            # Shallow copy
+                            message_to_trace = message.model_copy()
+                            message_to_trace.content = list(message_to_trace.content)
+
+                        message_to_trace.content[idx] = (  # type: ignore[index]  # mypy confused by .model_copy
+                            convert_to_openai_image_block(block)
+                        )
+                    elif len(block) == 1 and "type" not in block:
+                        # Tracing assumes all content blocks have a "type" key. Here
+                        # we add this key if it is missing, and there's an obvious
+                        # choice for the type (e.g., a single key in the block).
+                        if message_to_trace is message:
+                            # Shallow copy
+                            message_to_trace = message.model_copy()
+                            message_to_trace.content = list(message_to_trace.content)
+                        key = next(iter(block))
+                        message_to_trace.content[idx] = {  # type: ignore[index]
+                            "type": key,
+                            key: block[key],
+                        }
+                    else:
+                        pass
         messages_to_trace.append(message_to_trace)
 
     return messages_to_trace
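Note on the copy-on-write pattern above: model_copy() makes a shallow copy, so the copy initially shares the same content list object as the original message; re-wrapping it with list(...) is what makes the in-place block replacement safe. The "Test no mutation" assertion in the test added below relies on this. A small sketch of that behavior, assuming pydantic's shallow model_copy semantics (illustrative, not library code):

    # Shallow copy shares the content list; copying the list before editing
    # keeps the caller's message untouched.
    from langchain_core.messages import HumanMessage

    original = HumanMessage(content=[{"cachePoint": {"type": "default"}}])
    copy = original.model_copy()        # shallow copy of the message
    copy.content = list(copy.content)   # detach the list before mutating
    copy.content[0] = {"type": "cachePoint", "cachePoint": {"type": "default"}}
    assert original.content == [{"cachePoint": {"type": "default"}}]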
@@ -467,6 +467,55 @@ def test_trace_images_in_openai_format() -> None:
     ]
 
 
+def test_trace_content_blocks_with_no_type_key() -> None:
+    """Test that we add a ``type`` key to certain content blocks that don't have one."""
+    llm = ParrotFakeChatModel()
+    messages = [
+        {
+            "role": "user",
+            "content": [
+                {
+                    "type": "text",
+                    "text": "Hello",
+                },
+                {
+                    "cachePoint": {"type": "default"},
+                },
+            ],
+        }
+    ]
+    tracer = FakeChatModelStartTracer()
+    response = llm.invoke(messages, config={"callbacks": [tracer]})
+    assert tracer.messages == [
+        [
+            [
+                HumanMessage(
+                    [
+                        {
+                            "type": "text",
+                            "text": "Hello",
+                        },
+                        {
+                            "type": "cachePoint",
+                            "cachePoint": {"type": "default"},
+                        },
+                    ]
+                )
+            ]
+        ]
+    ]
+    # Test no mutation
+    assert response.content == [
+        {
+            "type": "text",
+            "text": "Hello",
+        },
+        {
+            "cachePoint": {"type": "default"},
+        },
+    ]
+
+
 def test_extend_support_to_openai_multimodal_formats() -> None:
     """Test that chat models normalize OpenAI file and audio inputs."""
     llm = ParrotFakeChatModel()
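For completeness, a hedged usage sketch of observing the same normalization outside the test suite, with a custom callback handler instead of the test-only FakeChatModelStartTracer (the class name CaptureStart and the `model` variable are illustrative assumptions):

    # Collects the message batches a chat model reports to on_chat_model_start.
    from langchain_core.callbacks import BaseCallbackHandler

    class CaptureStart(BaseCallbackHandler):
        def __init__(self) -> None:
            self.batches = []

        def on_chat_model_start(self, serialized, messages, **kwargs):
            # `messages` is the already-formatted list[list[BaseMessage]].
            self.batches.append(messages)

    handler = CaptureStart()
    # Assuming `model` is any chat model instance:
    # model.invoke(
    #     [{"role": "user", "content": [{"cachePoint": {"type": "default"}}]}],
    #     config={"callbacks": [handler]},
    # )
    # handler.batches[0][0][0].content[0]["type"] == "cachePoint"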