mirror of
https://github.com/hwchase17/langchain.git
synced 2025-06-19 13:23:35 +00:00
core: basemessage.text() (#29078)
This commit is contained in:
parent
e2ba336e72
commit
6c1e21d128
@ -86,7 +86,7 @@ class Chat__ModuleName__(BaseChatModel):
|
||||
.. code-block:: python
|
||||
|
||||
for chunk in llm.stream(messages):
|
||||
print(chunk)
|
||||
print(chunk.text(), end="")
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
@ -92,6 +92,27 @@ class BaseMessage(Serializable):
|
||||
"""
|
||||
return ["langchain", "schema", "messages"]
|
||||
|
||||
def text(self) -> str:
    """Get the text content of the message.

    Returns:
        The text content of the message. For string content this is the
        content itself; for list content it is the concatenation of all
        plain-string blocks and all ``{"type": "text"}`` blocks.
    """
    if isinstance(self.content, str):
        return self.content

    # content must be a list of str and/or dict blocks; collect the textual
    # parts in order and join them.
    parts: list = []
    for block in self.content:
        if isinstance(block, str):
            parts.append(block)
        elif block.get("type") == "text" and isinstance(block.get("text"), str):
            parts.append(block["text"])
    return "".join(parts)
|
||||
|
||||
def __add__(self, other: Any) -> ChatPromptTemplate:
|
||||
"""Concatenate this message with another message."""
|
||||
from langchain_core.prompts.chat import ChatPromptTemplate
|
||||
|
@ -1033,3 +1033,61 @@ def test_tool_message_tool_call_id() -> None:
|
||||
ToolMessage("foo", tool_call_id=uuid.uuid4())
|
||||
ToolMessage("foo", tool_call_id=1)
|
||||
ToolMessage("foo", tool_call_id=1.0)
|
||||
|
||||
|
||||
def test_message_text() -> None:
    """Exercise ``BaseMessage.text()`` across the input partitions."""
    # partitions:
    # message types: [ai], [human], [system], [tool]
    # content types: [str], [list[str]], [list[dict]], [list[str | dict]]
    # content: [empty], [single element], [multiple elements]
    # content dict types: [text], [not text], [no type]

    # plain string content and list-of-string content
    assert HumanMessage(content="foo").text() == "foo"
    assert AIMessage(content=[]).text() == ""
    assert AIMessage(content=["foo", "bar"]).text() == "foobar"

    # list of dicts: only the "text"-typed block contributes
    thinking_content = [
        {"type": "text", "text": "<thinking>thinking...</thinking>"},
        {
            "type": "tool_use",
            "id": "toolu_01A09q90qw90lq917835lq9",
            "name": "get_weather",
            "input": {"location": "San Francisco, CA"},
        },
    ]
    assert AIMessage(content=thinking_content).text() == (
        "<thinking>thinking...</thinking>"
    )

    # mixed list of dict and str
    mixed_content = [{"type": "text", "text": "foo"}, "bar"]
    assert SystemMessage(content=mixed_content).text() == "foobar"

    # tool message: non-text blocks (e.g. images) are skipped
    tool_content = [
        {"type": "text", "text": "15 degrees"},
        {
            "type": "image",
            "source": {
                "type": "base64",
                "media_type": "image/jpeg",
                "data": "/9j/4AAQSkZJRg...",
            },
        },
    ]
    assert ToolMessage(content=tool_content, tool_call_id="1").text() == "15 degrees"

    # dicts missing type: text, or with a non-text type, contribute nothing
    assert (
        AIMessage(content=[{"text": "hi there"}, "hi"]).text() == "hi"
    )  # missing type: text
    assert AIMessage(content=[{"type": "nottext", "text": "hi"}]).text() == ""
    assert AIMessage(content=[]).text() == ""

    # tool calls live outside content and do not affect text()
    only_tool_calls = AIMessage(
        content="", tool_calls=[create_tool_call(name="a", args={"b": 1}, id=None)]
    )
    assert only_tool_calls.text() == ""
|
||||
|
@ -368,7 +368,7 @@ class ChatAnthropic(BaseChatModel):
|
||||
.. code-block:: python
|
||||
|
||||
for chunk in llm.stream(messages):
|
||||
print(chunk)
|
||||
print(chunk.text(), end="")
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
@ -70,7 +70,7 @@ class ChatDeepSeek(BaseChatOpenAI):
|
||||
.. code-block:: python
|
||||
|
||||
for chunk in llm.stream(messages):
|
||||
print(chunk)
|
||||
print(chunk.text(), end="")
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
@ -172,7 +172,7 @@ class ChatGroq(BaseChatModel):
|
||||
.. code-block:: python
|
||||
|
||||
for chunk in llm.stream(messages):
|
||||
print(chunk)
|
||||
print(chunk.text(), end="")
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
@ -228,7 +228,7 @@ class ChatOllama(BaseChatModel):
|
||||
("human", "Return the words Hello World!"),
|
||||
]
|
||||
for chunk in llm.stream(messages):
|
||||
print(chunk)
|
||||
print(chunk.text(), end="")
|
||||
|
||||
|
||||
.. code-block:: python
|
||||
|
@ -183,7 +183,7 @@ class AzureChatOpenAI(BaseChatOpenAI):
|
||||
.. code-block:: python
|
||||
|
||||
for chunk in llm.stream(messages):
|
||||
print(chunk)
|
||||
print(chunk.text(), end="")
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
@ -1616,7 +1616,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override]
|
||||
.. code-block:: python
|
||||
|
||||
for chunk in llm.stream(messages):
|
||||
print(chunk)
|
||||
print(chunk.text(), end="")
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
@ -90,7 +90,7 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override]
|
||||
.. code-block:: python
|
||||
|
||||
for chunk in llm.stream(messages):
|
||||
print(chunk)
|
||||
print(chunk.text(), end="")
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user