mirror of https://github.com/hwchase17/langchain.git (synced 2025-07-17 10:13:29 +00:00)

commit 28b3598744
parent a1a4e9f4a5

    wip
@@ -12,6 +12,7 @@ from __future__ import annotations
 import base64
 import inspect
 import json
+import logging
 from collections.abc import Iterable, Sequence
 from functools import partial
 from typing import (
@@ -46,6 +47,9 @@ if TYPE_CHECKING:
 from langchain_core.runnables.base import Runnable
 
 
+logger = logging.getLogger(__name__)
+
+
 def _get_type(v: Any) -> str:
 """Get the type associated with the object for serialization purposes."""
 if isinstance(v, dict) and "type" in v:
@@ -993,11 +997,14 @@ def convert_to_openai_messages(
 f"but is missing expected key(s) "
 f"{missing}. Full content block:\n\n{block}"
 )
-if not coerce:
-raise ValueError(err)
-else:
-logger.warning(err)
-text = str({k: v for k, v in block.items() if k != "type"}) if len(block) > 1 else ""
+_raise_or_warn(err, coerce)
+text = (
+_try_json_dumps(
+{k: v for k, v in block.items() if k != "type"}
+)
+if len(block) > 1
+else ""
+)
 else:
 text = block["text"]
 content.append({"type": block["type"], "text": text})
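Note: as a standalone illustration of the fallback introduced in this hunk (a sketch only; the sample block and the local copy of _try_json_dumps below are for demonstration, not part of the patch), a malformed 'text' block is now reduced to a JSON dump of its non-'type' keys instead of a str()-formatted dict:

import json
from typing import Any


def _try_json_dumps(o: Any) -> str:
    # Same helper the patch adds at module level: prefer JSON, fall back to str().
    try:
        return json.dumps(o)
    except Exception:
        return str(o)


# Hypothetical malformed block: declares 'type': 'text' but has no 'text' key.
block = {"type": "text", "value": "hello"}
text = (
    _try_json_dumps({k: v for k, v in block.items() if k != "type"})
    if len(block) > 1
    else ""
)
print(text)  # -> {"value": "hello"}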
@@ -1009,10 +1016,8 @@ def convert_to_openai_messages(
 f"but is missing expected key(s) "
 f"{missing}. Full content block:\n\n{block}"
 )
-if not coerce:
-raise ValueError(err)
-else:
-continue
+_raise_or_warn(err, coerce)
+continue
 content.append(
 {"type": "image_url", "image_url": block["image_url"]}
 )
@@ -1031,10 +1036,8 @@ def convert_to_openai_messages(
 f"but 'source' is missing expected key(s) "
 f"{missing}. Full content block:\n\n{block}"
 )
-if not coerce:
-raise ValueError(err)
-else:
-continue
+_raise_or_warn(err, coerce)
+continue
 content.append(
 {
 "type": "image_url",
@@ -1057,10 +1060,8 @@ def convert_to_openai_messages(
 f"but 'image' is missing expected key(s) "
 f"{missing}. Full content block:\n\n{block}"
 )
-if not coerce:
-raise ValueError(err)
-else:
-continue
+_raise_or_warn(err, coerce)
+continue
 b64_image = _bytes_to_b64_str(image["source"]["bytes"])
 content.append(
 {
@@ -1080,10 +1081,8 @@ def convert_to_openai_messages(
 f"but does not have a 'source' or 'image' key. Full "
 f"content block:\n\n{block}"
 )
-if not coerce:
-raise ValueError(err)
-else:
-continue
+_raise_or_warn(err, coerce)
+continue
 elif block.get("type") == "tool_use":
 if missing := [
 k for k in ("id", "name", "input") if k not in block
@@ -1094,10 +1093,8 @@ def convert_to_openai_messages(
 f"'tool_use', but is missing expected key(s) "
 f"{missing}. Full content block:\n\n{block}"
 )
-if not coerce:
-raise ValueError(err)
-else:
-continue
+_raise_or_warn(err, coerce)
+continue
 if not any(
 tool_call["id"] == block["id"]
 for tool_call in cast(AIMessage, message).tool_calls
@@ -1123,10 +1120,8 @@ def convert_to_openai_messages(
 f"'tool_result', but is missing expected key(s) "
 f"{missing}. Full content block:\n\n{block}"
 )
-if not coerce:
-raise ValueError(msg)
-else:
-continue
+_raise_or_warn(err, coerce)
+continue
 tool_message = ToolMessage(
 block["content"],
 tool_call_id=block["tool_use_id"],
@@ -1146,10 +1141,8 @@ def convert_to_openai_messages(
 f"but does not have a 'json' key. Full "
 f"content block:\n\n{block}"
 )
-if not coerce:
-raise ValueError(msg)
-else:
-continue
+_raise_or_warn(msg, coerce)
+continue
 content.append(
 {"type": "text", "text": json.dumps(block["json"])}
 )
@@ -1167,8 +1160,16 @@ def convert_to_openai_messages(
 f"messages[{i}].content[{j}]['guard_content']['text'] "
 f"key. Full content block:\n\n{block}"
 )
-raise ValueError(msg)
-text = block["guard_content"]["text"]
+_raise_or_warn(msg, coerce)
+text = (
+_try_json_dumps(
+{k: v for k, v in block.items() if k != "type"}
+)
+if len(block) > 1
+else ""
+)
+else:
+text = block["guard_content"]["text"]
 if isinstance(text, dict):
 text = text["text"]
 content.append({"type": "text", "text": text})
@@ -1183,14 +1184,16 @@ def convert_to_openai_messages(
 f"'media' but does not have key(s) {missing}. Full "
 f"content block:\n\n{block}"
 )
-raise ValueError(err)
+_raise_or_warn(err, coerce)
+continue
 if "image" not in block["mime_type"]:
 err = (
 f"OpenAI messages can only support text and image data."
 f" Received content block with media of type:"
 f" {block['mime_type']}"
 )
-raise ValueError(err)
+_raise_or_warn(err, coerce)
+continue
 b64_image = _bytes_to_b64_str(block["data"])
 content.append(
 {
@@ -1209,7 +1212,8 @@ def convert_to_openai_messages(
 f"Anthropic, Bedrock Converse, or VertexAI format. Full "
 f"content block:\n\n{block}"
 )
-raise ValueError(err)
+_raise_or_warn(err, coerce)
+continue
 if text_format == "string" and not any(
 block["type"] != "text" for block in content
 ):
@@ -1432,3 +1436,17 @@ def _convert_to_openai_tool_calls(tool_calls: list[ToolCall]) -> list[dict]:
 }
 for tool_call in tool_calls
 ]
+
+
+def _try_json_dumps(o: Any) -> str:
+    try:
+        return json.dumps(o)
+    except Exception:
+        return str(o)
+
+
+def _raise_or_warn(msg: str, silence: bool) -> None:
+    if not silence:
+        raise ValueError(msg)
+    else:
+        logger.warning(msg)
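Note: a quick behavioral sketch of the new _raise_or_warn helper (the calling code below is illustrative and not part of the patch): with silence=False the message is raised as a ValueError, with silence=True it is only logged, which is what lets the conversion paths above degrade to a warning plus continue.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)


def _raise_or_warn(msg: str, silence: bool) -> None:
    # Mirrors the helper added in this hunk.
    if not silence:
        raise ValueError(msg)
    else:
        logger.warning(msg)


try:
    _raise_or_warn("missing expected key(s) ['text']", silence=False)
except ValueError as exc:
    print(f"raised: {exc}")

_raise_or_warn("missing expected key(s) ['text']", silence=True)  # warning only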
@@ -861,7 +861,7 @@ class BaseChatOpenAI(BaseChatModel):
 )
 
 def _coerce_messages(self, messages: list[BaseMessage]) -> list[BaseMessage]:
-return convert_to_messages(convert_to_openai_messages(messages))
+return convert_to_messages(convert_to_openai_messages(messages, coerce=True))
 
 @property
 def _identifying_params(self) -> Dict[str, Any]:
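Note: end to end, the last hunk means BaseChatOpenAI._coerce_messages now tolerates malformed content blocks instead of raising. A hedged usage sketch, assuming the coerce keyword added on this branch (it is not part of a released langchain_core API and is assumed to default to False) and a deliberately malformed block:

from langchain_core.messages import HumanMessage, convert_to_messages
from langchain_core.messages.utils import convert_to_openai_messages

# Hypothetical block that claims 'type': 'text' but lacks the 'text' key.
messages = [HumanMessage(content=[{"type": "text", "value": "hi"}])]

# Without coerce, conversion raises ValueError describing the missing key; with
# coerce=True (this branch) the error is logged and the block is converted on a
# best-effort basis, so the round-trip below succeeds.
oai_messages = convert_to_openai_messages(messages, coerce=True)
coerced = convert_to_messages(oai_messages)
print(oai_messages)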