From 9b7b8e4a1a615800075191fe797098ba0ed389a8 Mon Sep 17 00:00:00 2001
From: Hugh Gao
Date: Thu, 6 Mar 2025 00:22:14 +0800
Subject: [PATCH] community: make DashScope models support Partial Mode for text continuation. (#30108)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## Description

Make DashScope models support Partial Mode for text continuation. ChatTongyi now supports continuing text from a given prefix: pass the prefix as an AIMessage carrying a "partial" flag in its additional_kwargs. See the DashScope documentation: [Partial Mode](https://help.aliyun.com/zh/model-studio/user-guide/partial-mode?spm=a2c4g.11186623.help-menu-2400256.d_1_0_0_8.211e5b77KMH5Pn&scm=20140722.H_2862210._.OR_help-T_cn~zh-V_1).

The underlying DashScope API example is:

```py
import os
import dashscope

messages = [
    {
        "role": "user",
        "content": 'Please continue the sentence "Spring has arrived, and the earth" to express the beauty of spring and the author\'s joy.',
    },
    {
        "role": "assistant",
        "content": "Spring has arrived, and the earth",
        "partial": True,
    },
]
response = dashscope.Generation.call(
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    model="qwen-plus",
    messages=messages,
    result_format="message",
)
print(response.output.choices[0].message.content)
```

---------

Co-authored-by: Chester Curme
---
 docs/docs/integrations/chat/tongyi.ipynb      | 77 ++++++++++++++-----
 .../langchain_community/chat_models/tongyi.py |  5 ++
 .../unit_tests/chat_models/test_tongyi.py     | 14 ++++
 3 files changed, 78 insertions(+), 18 deletions(-)

diff --git a/docs/docs/integrations/chat/tongyi.ipynb b/docs/docs/integrations/chat/tongyi.ipynb
index 8940489d39d..02ee96c1e86 100644
--- a/docs/docs/integrations/chat/tongyi.ipynb
+++ b/docs/docs/integrations/chat/tongyi.ipynb
@@ -26,22 +26,9 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Note: you may need to restart the kernel to use updated packages.\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
    "source": [
     "# Install the package\n",
     "%pip install --upgrade --quiet dashscope"
@@ -49,8 +36,12 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 1,
    "metadata": {
+    "ExecuteTime": {
+     "end_time": "2025-03-05T01:11:20.457141Z",
+     "start_time": "2025-03-05T01:11:18.810160Z"
+    },
     "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
@@ -66,8 +57,12 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 2,
    "metadata": {
+    "ExecuteTime": {
+     "end_time": "2025-03-05T01:11:24.270318Z",
+     "start_time": "2025-03-05T01:11:24.268064Z"
+    },
     "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
@@ -266,6 +261,52 @@
     "ai_message"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Partial Mode\n",
+    "Enable the large model to continue generating content from the initial text you provide."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2025-03-05T01:31:29.155824Z",
+     "start_time": "2025-03-05T01:31:27.239667Z"
+    }
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "AIMessage(content=' has cast off its heavy cloak of snow, donning instead a vibrant garment of fresh greens and floral hues; it is as if the world has woken from a long slumber, stretching and reveling in the warm caress of the sun. Everywhere I look, there is a symphony of life: birdsong fills the air, bees dance from flower to flower, and a gentle breeze carries the sweet fragrance of blossoms. It is in this season that my heart finds particular joy, for it whispers promises of renewal and growth, reminding me that even after the coldest winters, there will always be a spring to follow.', additional_kwargs={}, response_metadata={'model_name': 'qwen-turbo', 'finish_reason': 'stop', 'request_id': '447283e9-ee31-9d82-8734-af572921cb05', 'token_usage': {'input_tokens': 40, 'output_tokens': 127, 'prompt_tokens_details': {'cached_tokens': 0}, 'total_tokens': 167}}, id='run-6a35a91c-cc12-4afe-b56f-fd26d9035357-0')"
+      ]
+     },
+     "execution_count": 4,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from langchain_community.chat_models.tongyi import ChatTongyi\n",
+    "from langchain_core.messages import AIMessage, HumanMessage\n",
+    "\n",
+    "messages = [\n",
+    "    HumanMessage(\n",
+    "        content=\"\"\"Please continue the sentence \"Spring has arrived, and the earth\" to express the beauty of spring and the author's joy.\"\"\"\n",
+    "    ),\n",
+    "    AIMessage(\n",
+    "        content=\"Spring has arrived, and the earth\", additional_kwargs={\"partial\": True}\n",
+    "    ),\n",
+    "]\n",
+    "chatLLM = ChatTongyi()\n",
+    "ai_message = chatLLM.invoke(messages)\n",
+    "ai_message"
+   ]
+  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
diff --git a/libs/community/langchain_community/chat_models/tongyi.py b/libs/community/langchain_community/chat_models/tongyi.py
index acb099b18b8..de02213c580 100644
--- a/libs/community/langchain_community/chat_models/tongyi.py
+++ b/libs/community/langchain_community/chat_models/tongyi.py
@@ -123,6 +123,8 @@ def convert_dict_to_message(
                         tool_calls.append(parsed_tool)
                 except Exception as e:
                     invalid_tool_calls.append(make_invalid_tool_call(value, str(e)))
+        elif "partial" in _dict and isinstance(_dict["partial"], bool):
+            additional_kwargs = {"partial": _dict["partial"]}
         else:
             additional_kwargs = {}
diff --git a/libs/community/tests/unit_tests/chat_models/test_tongyi.py b/libs/community/tests/unit_tests/chat_models/test_tongyi.py
index 452870f7aa4..afe151d4ed1 100644
--- a/libs/community/tests/unit_tests/chat_models/test_tongyi.py
+++ b/libs/community/tests/unit_tests/chat_models/test_tongyi.py
@@ -65,6 +65,13 @@ def test__convert_dict_to_message_function_call() -> None:
     assert result == expected_output
 
 
+def test__convert_dict_to_message_partial_mode() -> None:
+    message_dict = {"role": "assistant", "content": "foo", "partial": True}
+    result = convert_dict_to_message(message_dict)
+    expected_output = AIMessage(content="foo", additional_kwargs={"partial": True})
+    assert result == expected_output
+
+
 def test__convert_message_to_dict_human() -> None:
     message = HumanMessage(content="foo")
     result = convert_message_to_dict(message)
@@ -79,6 +86,13 @@ def test__convert_message_to_dict_ai() -> None:
     assert result == expected_output
 
 
+def test__convert_message_to_dict_ai_partial_mode() -> None:
+    message = AIMessage(content="foo", additional_kwargs={"partial": True})
+    result = convert_message_to_dict(message)
+    expected_output = {"role": "assistant", "content": "foo", "partial": True}
+    assert result == expected_output
+
+
 def test__convert_message_to_dict_system() -> None:
     message = SystemMessage(content="foo")
     result = convert_message_to_dict(message)
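
For reference, here is a minimal sketch (not part of the patch) of how the new "partial" flag travels from a LangChain message into the payload sent to DashScope. It reuses the prompt from the notebook example above and relies only on `convert_message_to_dict` as modified in this PR:

```py
# Minimal sketch: the "partial" flag set in AIMessage.additional_kwargs is
# copied into the request dict by convert_message_to_dict, which is exactly
# what the new unit tests assert.
from langchain_community.chat_models.tongyi import convert_message_to_dict
from langchain_core.messages import AIMessage, HumanMessage

messages = [
    HumanMessage(
        content='Please continue the sentence "Spring has arrived, and the earth" '
        "to express the beauty of spring and the author's joy."
    ),
    AIMessage(
        content="Spring has arrived, and the earth",
        additional_kwargs={"partial": True},
    ),
]

payload = [convert_message_to_dict(m) for m in messages]
assert payload[1] == {
    "role": "assistant",
    "content": "Spring has arrived, and the earth",
    "partial": True,
}
```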