From aa6dae4a5bd6f62f82497f5c29ec72a55367bbd1 Mon Sep 17 00:00:00 2001
From: Hugh Gao
Date: Mon, 10 Mar 2025 20:58:40 +0800
Subject: [PATCH] community: Remove the system message count limit for
 ChatTongyi. (#30192)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## Description
The models in DashScope support multiple SystemMessage entries. Here is the [Doc](https://bailian.console.aliyun.com/model_experience_center/text#/model-market/detail/qwen-long?tabKey=sdk), and the example code on the document page:

```python
import os
from openai import OpenAI

client = OpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),  # 如果您没有配置环境变量,请在此处替换您的API-KEY
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",  # 填写DashScope服务base_url
)

# 初始化messages列表
completion = client.chat.completions.create(
    model="qwen-long",
    messages=[
        {'role': 'system', 'content': 'You are a helpful assistant.'},
        # 请将 'file-fe-xxx'替换为您实际对话场景所使用的 file-id。
        {'role': 'system', 'content': 'fileid://file-fe-xxx'},
        {'role': 'user', 'content': '这篇文章讲了什么?'}
    ],
    stream=True,
    stream_options={"include_usage": True}
)

full_content = ""
for chunk in completion:
    if chunk.choices and chunk.choices[0].delta.content:
        # 拼接输出内容
        full_content += chunk.choices[0].delta.content
        print(chunk.model_dump())

print({full_content})
```

Tip: The example code is for the OpenAI SDK, but the document says that it also supports the DashScope API; I tested it, and it works.

```
Is the Dashscope SDK invocation method compatible?

Yes, the Dashscope SDK remains compatible for model invocation. However, file uploads and file-ID retrieval are currently only supported via the OpenAI SDK. The file-ID obtained through this method is also compatible with Dashscope for model invocation.
``` --- libs/community/langchain_community/chat_models/tongyi.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/libs/community/langchain_community/chat_models/tongyi.py b/libs/community/langchain_community/chat_models/tongyi.py index de02213c580..4c44272f271 100644 --- a/libs/community/langchain_community/chat_models/tongyi.py +++ b/libs/community/langchain_community/chat_models/tongyi.py @@ -783,8 +783,6 @@ class ChatTongyi(BaseChatModel): ] if len(system_message_indices) == 1 and system_message_indices[0] != 0: raise ValueError("System message can only be the first message.") - elif len(system_message_indices) > 1: - raise ValueError("There can be only one system message at most.") params["messages"] = message_dicts