core: basemessage.text() (#29078)

This commit is contained in:
Erick Friis 2025-02-18 17:45:44 -08:00 committed by GitHub
parent e2ba336e72
commit 6c1e21d128
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
10 changed files with 87 additions and 8 deletions

View File

@ -86,7 +86,7 @@ class Chat__ModuleName__(BaseChatModel):
.. code-block:: python
for chunk in llm.stream(messages):
print(chunk)
print(chunk.text(), end="")
.. code-block:: python

View File

@ -92,6 +92,27 @@ class BaseMessage(Serializable):
"""
return ["langchain", "schema", "messages"]
def text(self) -> str:
    """Get the text content of the message.

    Returns:
        The text content of the message.
    """
    content = self.content
    # String content is already the text.
    if isinstance(content, str):
        return content

    # Otherwise content is a list of blocks: plain strings contribute
    # directly; dict blocks contribute only when tagged type="text" with a
    # string "text" value. Everything else (tool_use, image, ...) is skipped.
    parts: list[str] = []
    for block in content:
        if isinstance(block, str):
            parts.append(block)
        elif block.get("type") == "text" and isinstance(block.get("text"), str):
            parts.append(block["text"])
    return "".join(parts)
def __add__(self, other: Any) -> ChatPromptTemplate:
"""Concatenate this message with another message."""
from langchain_core.prompts.chat import ChatPromptTemplate

View File

@ -1033,3 +1033,61 @@ def test_tool_message_tool_call_id() -> None:
ToolMessage("foo", tool_call_id=uuid.uuid4())
ToolMessage("foo", tool_call_id=1)
ToolMessage("foo", tool_call_id=1.0)
def test_message_text() -> None:
    """Exercise ``BaseMessage.text()`` across message and content shapes.

    Partitions covered:
    - message types: ai, human, system, tool
    - content types: str, list[str], list[dict], list[str | dict]
    - content sizes: empty, single element, multiple elements
    - content dict variants: text, non-text type, missing type
    """
    # Plain string content is returned verbatim.
    assert HumanMessage(content="foo").text() == "foo"

    # Empty list and all-string list content.
    assert AIMessage(content=[]).text() == ""
    assert AIMessage(content=["foo", "bar"]).text() == "foobar"

    # Only blocks tagged type="text" contribute; tool_use blocks are skipped.
    mixed_ai = AIMessage(
        content=[
            {"type": "text", "text": "<thinking>thinking...</thinking>"},
            {
                "type": "tool_use",
                "id": "toolu_01A09q90qw90lq917835lq9",
                "name": "get_weather",
                "input": {"location": "San Francisco, CA"},
            },
        ]
    )
    assert mixed_ai.text() == "<thinking>thinking...</thinking>"

    # str and dict blocks may be interleaved in one content list.
    system = SystemMessage(content=[{"type": "text", "text": "foo"}, "bar"])
    assert system.text() == "foobar"

    # Non-text blocks (e.g. images) in tool messages are ignored.
    tool = ToolMessage(
        content=[
            {"type": "text", "text": "15 degrees"},
            {
                "type": "image",
                "source": {
                    "type": "base64",
                    "media_type": "image/jpeg",
                    "data": "/9j/4AAQSkZJRg...",
                },
            },
        ],
        tool_call_id="1",
    )
    assert tool.text() == "15 degrees"

    # A dict lacking type="text" is skipped even when it carries a "text" key.
    assert AIMessage(content=[{"text": "hi there"}, "hi"]).text() == "hi"  # missing type: text
    assert AIMessage(content=[{"type": "nottext", "text": "hi"}]).text() == ""
    assert AIMessage(content=[]).text() == ""

    # Tool calls live outside ``content`` and do not affect text().
    with_tool_call = AIMessage(
        content="", tool_calls=[create_tool_call(name="a", args={"b": 1}, id=None)]
    )
    assert with_tool_call.text() == ""

View File

@ -368,7 +368,7 @@ class ChatAnthropic(BaseChatModel):
.. code-block:: python
for chunk in llm.stream(messages):
print(chunk)
print(chunk.text(), end="")
.. code-block:: python

View File

@ -70,7 +70,7 @@ class ChatDeepSeek(BaseChatOpenAI):
.. code-block:: python
for chunk in llm.stream(messages):
print(chunk)
print(chunk.text(), end="")
.. code-block:: python

View File

@ -172,7 +172,7 @@ class ChatGroq(BaseChatModel):
.. code-block:: python
for chunk in llm.stream(messages):
print(chunk)
print(chunk.text(), end="")
.. code-block:: python

View File

@ -228,7 +228,7 @@ class ChatOllama(BaseChatModel):
("human", "Return the words Hello World!"),
]
for chunk in llm.stream(messages):
print(chunk)
print(chunk.text(), end="")
.. code-block:: python

View File

@ -183,7 +183,7 @@ class AzureChatOpenAI(BaseChatOpenAI):
.. code-block:: python
for chunk in llm.stream(messages):
print(chunk)
print(chunk.text(), end="")
.. code-block:: python

View File

@ -1616,7 +1616,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override]
.. code-block:: python
for chunk in llm.stream(messages):
print(chunk)
print(chunk.text(), end="")
.. code-block:: python

View File

@ -90,7 +90,7 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override]
.. code-block:: python
for chunk in llm.stream(messages):
print(chunk)
print(chunk.text(), end="")
.. code-block:: python