community: replace it with Tencent Cloud SDK (#24172)

Description: The legacy hand-signed HTTP endpoint will be discontinued; switch to
the official Tencent Cloud SDK, which also exposes more model options.
Issue: None
Dependencies: None
Twitter handle: None

Co-authored-by: trumanyan <trumanyan@tencent.com>
This commit is contained in:
TrumanYan 2024-07-31 21:05:38 +08:00 committed by GitHub
parent 99eb31ec41
commit 096b66db4a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 87 additions and 156 deletions

View File

@ -1,13 +1,7 @@
import base64
import hashlib
import hmac
import json import json
import logging import logging
import time
from typing import Any, Dict, Iterator, List, Mapping, Optional, Type from typing import Any, Dict, Iterator, List, Mapping, Optional, Type
from urllib.parse import urlparse
import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.chat_models import ( from langchain_core.language_models.chat_models import (
BaseChatModel, BaseChatModel,
@ -34,18 +28,15 @@ from langchain_core.utils import (
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
DEFAULT_API_BASE = "https://hunyuan.cloud.tencent.com"
DEFAULT_PATH = "/hyllm/v1/chat/completions"
def _convert_message_to_dict(message: BaseMessage) -> dict: def _convert_message_to_dict(message: BaseMessage) -> dict:
message_dict: Dict[str, Any] message_dict: Dict[str, Any]
if isinstance(message, ChatMessage): if isinstance(message, ChatMessage):
message_dict = {"role": message.role, "content": message.content} message_dict = {"Role": message.role, "Content": message.content}
elif isinstance(message, HumanMessage): elif isinstance(message, HumanMessage):
message_dict = {"role": "user", "content": message.content} message_dict = {"Role": "user", "Content": message.content}
elif isinstance(message, AIMessage): elif isinstance(message, AIMessage):
message_dict = {"role": "assistant", "content": message.content} message_dict = {"Role": "assistant", "Content": message.content}
else: else:
raise TypeError(f"Got unknown type {message}") raise TypeError(f"Got unknown type {message}")
@ -53,20 +44,20 @@ def _convert_message_to_dict(message: BaseMessage) -> dict:
def _convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage: def _convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage:
role = _dict["role"] role = _dict["Role"]
if role == "user": if role == "user":
return HumanMessage(content=_dict["content"]) return HumanMessage(content=_dict["Content"])
elif role == "assistant": elif role == "assistant":
return AIMessage(content=_dict.get("content", "") or "") return AIMessage(content=_dict.get("Content", "") or "")
else: else:
return ChatMessage(content=_dict["content"], role=role) return ChatMessage(content=_dict["Content"], role=role)
def _convert_delta_to_message_chunk( def _convert_delta_to_message_chunk(
_dict: Mapping[str, Any], default_class: Type[BaseMessageChunk] _dict: Mapping[str, Any], default_class: Type[BaseMessageChunk]
) -> BaseMessageChunk: ) -> BaseMessageChunk:
role = _dict.get("role") role = _dict.get("Role")
content = _dict.get("content") or "" content = _dict.get("Content") or ""
if role == "user" or default_class == HumanMessageChunk: if role == "user" or default_class == HumanMessageChunk:
return HumanMessageChunk(content=content) return HumanMessageChunk(content=content)
@ -78,43 +69,13 @@ def _convert_delta_to_message_chunk(
return default_class(content=content) # type: ignore[call-arg] return default_class(content=content) # type: ignore[call-arg]
# signature generation
# https://cloud.tencent.com/document/product/1729/97732#532252ce-e960-48a7-8821-940a9ce2ccf3
def _signature(secret_key: SecretStr, url: str, payload: Dict[str, Any]) -> str:
sorted_keys = sorted(payload.keys())
url_info = urlparse(url)
sign_str = url_info.netloc + url_info.path + "?"
for key in sorted_keys:
value = payload[key]
if isinstance(value, list) or isinstance(value, dict):
value = json.dumps(value, separators=(",", ":"), ensure_ascii=False)
elif isinstance(value, float):
value = "%g" % value
sign_str = sign_str + key + "=" + str(value) + "&"
sign_str = sign_str[:-1]
hmacstr = hmac.new(
key=secret_key.get_secret_value().encode("utf-8"),
msg=sign_str.encode("utf-8"),
digestmod=hashlib.sha1,
).digest()
return base64.b64encode(hmacstr).decode("utf-8")
def _create_chat_result(response: Mapping[str, Any]) -> ChatResult: def _create_chat_result(response: Mapping[str, Any]) -> ChatResult:
generations = [] generations = []
for choice in response["choices"]: for choice in response["Choices"]:
message = _convert_dict_to_message(choice["messages"]) message = _convert_dict_to_message(choice["Message"])
generations.append(ChatGeneration(message=message)) generations.append(ChatGeneration(message=message))
token_usage = response["usage"] token_usage = response["Usage"]
llm_output = {"token_usage": token_usage} llm_output = {"token_usage": token_usage}
return ChatResult(generations=generations, llm_output=llm_output) return ChatResult(generations=generations, llm_output=llm_output)
@ -137,8 +98,6 @@ class ChatHunyuan(BaseChatModel):
def lc_serializable(self) -> bool: def lc_serializable(self) -> bool:
return True return True
hunyuan_api_base: str = Field(default=DEFAULT_API_BASE)
"""Hunyuan custom endpoints"""
hunyuan_app_id: Optional[int] = None hunyuan_app_id: Optional[int] = None
"""Hunyuan App ID""" """Hunyuan App ID"""
hunyuan_secret_id: Optional[str] = None hunyuan_secret_id: Optional[str] = None
@ -149,13 +108,26 @@ class ChatHunyuan(BaseChatModel):
"""Whether to stream the results or not.""" """Whether to stream the results or not."""
request_timeout: int = 60 request_timeout: int = 60
"""Timeout for requests to Hunyuan API. Default is 60 seconds.""" """Timeout for requests to Hunyuan API. Default is 60 seconds."""
query_id: Optional[str] = None
"""Query id for troubleshooting"""
temperature: float = 1.0 temperature: float = 1.0
"""What sampling temperature to use.""" """What sampling temperature to use."""
top_p: float = 1.0 top_p: float = 1.0
"""What probability mass to use.""" """What probability mass to use."""
model: str = "hunyuan-lite"
"""What Model to use.
Optional model:
- hunyuan-lite
- hunyuan-standard
- hunyuan-standard-256K
- hunyuan-pro
- hunyuan-code
- hunyuan-role
- hunyuan-functioncall
- hunyuan-vision
"""
stream_moderation: bool = False
"""Whether to review the results or not when streaming is true."""
enable_enhancement: bool = True
"""Whether to enhancement the results or not."""
model_kwargs: Dict[str, Any] = Field(default_factory=dict) model_kwargs: Dict[str, Any] = Field(default_factory=dict)
"""Holds any model parameters valid for API call not explicitly specified.""" """Holds any model parameters valid for API call not explicitly specified."""
@ -193,12 +165,6 @@ class ChatHunyuan(BaseChatModel):
@pre_init @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
values["hunyuan_api_base"] = get_from_dict_or_env(
values,
"hunyuan_api_base",
"HUNYUAN_API_BASE",
DEFAULT_API_BASE,
)
values["hunyuan_app_id"] = get_from_dict_or_env( values["hunyuan_app_id"] = get_from_dict_or_env(
values, values,
"hunyuan_app_id", "hunyuan_app_id",
@ -216,22 +182,19 @@ class ChatHunyuan(BaseChatModel):
"HUNYUAN_SECRET_KEY", "HUNYUAN_SECRET_KEY",
) )
) )
return values return values
@property @property
def _default_params(self) -> Dict[str, Any]: def _default_params(self) -> Dict[str, Any]:
"""Get the default parameters for calling Hunyuan API.""" """Get the default parameters for calling Hunyuan API."""
normal_params = { normal_params = {
"app_id": self.hunyuan_app_id, "Temperature": self.temperature,
"secret_id": self.hunyuan_secret_id, "TopP": self.top_p,
"temperature": self.temperature, "Model": self.model,
"top_p": self.top_p, "Stream": self.streaming,
"StreamModeration": self.stream_moderation,
"EnableEnhancement": self.enable_enhancement,
} }
if self.query_id is not None:
normal_params["query_id"] = self.query_id
return {**normal_params, **self.model_kwargs} return {**normal_params, **self.model_kwargs}
def _generate( def _generate(
@ -248,13 +211,7 @@ class ChatHunyuan(BaseChatModel):
return generate_from_stream(stream_iter) return generate_from_stream(stream_iter)
res = self._chat(messages, **kwargs) res = self._chat(messages, **kwargs)
return _create_chat_result(json.loads(res.to_json_string()))
response = res.json()
if "error" in response:
raise ValueError(f"Error from Hunyuan api response: {response}")
return _create_chat_result(response)
def _stream( def _stream(
self, self,
@ -266,19 +223,17 @@ class ChatHunyuan(BaseChatModel):
res = self._chat(messages, **kwargs) res = self._chat(messages, **kwargs)
default_chunk_class = AIMessageChunk default_chunk_class = AIMessageChunk
for chunk in res.iter_lines(): for chunk in res:
chunk = chunk.decode(encoding="UTF-8", errors="strict").replace( chunk = chunk.get("data", "")
"data: ", ""
)
if len(chunk) == 0: if len(chunk) == 0:
continue continue
response = json.loads(chunk) response = json.loads(chunk)
if "error" in response: if "error" in response:
raise ValueError(f"Error from Hunyuan api response: {response}") raise ValueError(f"Error from Hunyuan api response: {response}")
for choice in response["choices"]: for choice in response["Choices"]:
chunk = _convert_delta_to_message_chunk( chunk = _convert_delta_to_message_chunk(
choice["delta"], default_chunk_class choice["Delta"], default_chunk_class
) )
default_chunk_class = chunk.__class__ default_chunk_class = chunk.__class__
cg_chunk = ChatGenerationChunk(message=chunk) cg_chunk = ChatGenerationChunk(message=chunk)
@ -286,42 +241,32 @@ class ChatHunyuan(BaseChatModel):
run_manager.on_llm_new_token(chunk.content, chunk=cg_chunk) run_manager.on_llm_new_token(chunk.content, chunk=cg_chunk)
yield cg_chunk yield cg_chunk
def _chat(self, messages: List[BaseMessage], **kwargs: Any) -> requests.Response: def _chat(self, messages: List[BaseMessage], **kwargs: Any) -> Any:
if self.hunyuan_secret_key is None: if self.hunyuan_secret_key is None:
raise ValueError("Hunyuan secret key is not set.") raise ValueError("Hunyuan secret key is not set.")
try:
from tencentcloud.common import credential
from tencentcloud.hunyuan.v20230901 import hunyuan_client, models
except ImportError:
raise ImportError(
"Could not import tencentcloud python package. "
"Please install it with `pip install tencentcloud-sdk-python`."
)
parameters = {**self._default_params, **kwargs} parameters = {**self._default_params, **kwargs}
cred = credential.Credential(
headers = parameters.pop("headers", {}) self.hunyuan_secret_id, str(self.hunyuan_secret_key.get_secret_value())
timestamp = parameters.pop("timestamp", int(time.time())) )
expired = parameters.pop("expired", timestamp + 24 * 60 * 60) client = hunyuan_client.HunyuanClient(cred, "")
req = models.ChatCompletionsRequest()
payload = { params = {
"timestamp": timestamp, "Messages": [_convert_message_to_dict(m) for m in messages],
"expired": expired,
"messages": [_convert_message_to_dict(m) for m in messages],
**parameters, **parameters,
} }
req.from_json_string(json.dumps(params))
if self.streaming: resp = client.ChatCompletions(req)
payload["stream"] = 1 return resp
url = self.hunyuan_api_base + DEFAULT_PATH
res = requests.post(
url=url,
timeout=self.request_timeout,
headers={
"Content-Type": "application/json",
"Authorization": _signature(
secret_key=self.hunyuan_secret_key, url=url, payload=payload
),
**headers,
},
json=payload,
stream=self.streaming,
)
return res
@property @property
def _llm_type(self) -> str: def _llm_type(self) -> str:

View File

@ -1,8 +1,10 @@
import pytest
from langchain_core.messages import AIMessage, HumanMessage from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.chat_models.hunyuan import ChatHunyuan from langchain_community.chat_models.hunyuan import ChatHunyuan
@pytest.mark.requires("tencentcloud-sdk-python")
def test_chat_hunyuan() -> None: def test_chat_hunyuan() -> None:
chat = ChatHunyuan() chat = ChatHunyuan()
message = HumanMessage(content="Hello") message = HumanMessage(content="Hello")
@ -11,6 +13,7 @@ def test_chat_hunyuan() -> None:
assert isinstance(response.content, str) assert isinstance(response.content, str)
@pytest.mark.requires("tencentcloud-sdk-python")
def test_chat_hunyuan_with_temperature() -> None: def test_chat_hunyuan_with_temperature() -> None:
chat = ChatHunyuan(temperature=0.6) chat = ChatHunyuan(temperature=0.6)
message = HumanMessage(content="Hello") message = HumanMessage(content="Hello")
@ -19,6 +22,24 @@ def test_chat_hunyuan_with_temperature() -> None:
assert isinstance(response.content, str) assert isinstance(response.content, str)
@pytest.mark.requires("tencentcloud-sdk-python")
def test_chat_hunyuan_with_model_name() -> None:
    """Smoke-test a live invocation with an explicitly selected model."""
    llm = ChatHunyuan(model="hunyuan-standard")
    reply = llm.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
@pytest.mark.requires("tencentcloud-sdk-python")
def test_chat_hunyuan_with_stream() -> None:
    """Smoke-test a live invocation with streaming enabled."""
    llm = ChatHunyuan(streaming=True)
    reply = llm.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
def test_extra_kwargs() -> None: def test_extra_kwargs() -> None:
chat = ChatHunyuan(temperature=0.88, top_p=0.7) chat = ChatHunyuan(temperature=0.88, top_p=0.7)
assert chat.temperature == 0.88 assert chat.temperature == 0.88

View File

@ -8,27 +8,25 @@ from langchain_core.messages import (
HumanMessageChunk, HumanMessageChunk,
SystemMessage, SystemMessage,
) )
from langchain_core.pydantic_v1 import SecretStr
from langchain_community.chat_models.hunyuan import ( from langchain_community.chat_models.hunyuan import (
_convert_delta_to_message_chunk, _convert_delta_to_message_chunk,
_convert_dict_to_message, _convert_dict_to_message,
_convert_message_to_dict, _convert_message_to_dict,
_signature,
) )
def test__convert_message_to_dict_human() -> None: def test__convert_message_to_dict_human() -> None:
message = HumanMessage(content="foo") message = HumanMessage(content="foo")
result = _convert_message_to_dict(message) result = _convert_message_to_dict(message)
expected_output = {"role": "user", "content": "foo"} expected_output = {"Role": "user", "Content": "foo"}
assert result == expected_output assert result == expected_output
def test__convert_message_to_dict_ai() -> None: def test__convert_message_to_dict_ai() -> None:
message = AIMessage(content="foo") message = AIMessage(content="foo")
result = _convert_message_to_dict(message) result = _convert_message_to_dict(message)
expected_output = {"role": "assistant", "content": "foo"} expected_output = {"Role": "assistant", "Content": "foo"}
assert result == expected_output assert result == expected_output
@ -47,68 +45,35 @@ def test__convert_message_to_dict_function() -> None:
def test__convert_dict_to_message_human() -> None: def test__convert_dict_to_message_human() -> None:
message_dict = {"role": "user", "content": "foo"} message_dict = {"Role": "user", "Content": "foo"}
result = _convert_dict_to_message(message_dict) result = _convert_dict_to_message(message_dict)
expected_output = HumanMessage(content="foo") expected_output = HumanMessage(content="foo")
assert result == expected_output assert result == expected_output
def test__convert_dict_to_message_ai() -> None: def test__convert_dict_to_message_ai() -> None:
message_dict = {"role": "assistant", "content": "foo"} message_dict = {"Role": "assistant", "Content": "foo"}
result = _convert_dict_to_message(message_dict) result = _convert_dict_to_message(message_dict)
expected_output = AIMessage(content="foo") expected_output = AIMessage(content="foo")
assert result == expected_output assert result == expected_output
def test__convert_dict_to_message_other_role() -> None: def test__convert_dict_to_message_other_role() -> None:
message_dict = {"role": "system", "content": "foo"} message_dict = {"Role": "system", "Content": "foo"}
result = _convert_dict_to_message(message_dict) result = _convert_dict_to_message(message_dict)
expected_output = ChatMessage(role="system", content="foo") expected_output = ChatMessage(role="system", content="foo")
assert result == expected_output assert result == expected_output
def test__convert_delta_to_message_assistant() -> None: def test__convert_delta_to_message_assistant() -> None:
delta = {"role": "assistant", "content": "foo"} delta = {"Role": "assistant", "Content": "foo"}
result = _convert_delta_to_message_chunk(delta, AIMessageChunk) result = _convert_delta_to_message_chunk(delta, AIMessageChunk)
expected_output = AIMessageChunk(content="foo") expected_output = AIMessageChunk(content="foo")
assert result == expected_output assert result == expected_output
def test__convert_delta_to_message_human() -> None: def test__convert_delta_to_message_human() -> None:
delta = {"role": "user", "content": "foo"} delta = {"Role": "user", "Content": "foo"}
result = _convert_delta_to_message_chunk(delta, HumanMessageChunk) result = _convert_delta_to_message_chunk(delta, HumanMessageChunk)
expected_output = HumanMessageChunk(content="foo") expected_output = HumanMessageChunk(content="foo")
assert result == expected_output assert result == expected_output
def test__signature() -> None:
    """Check the HMAC-SHA1 signature against the value produced by the
    official Hunyuan demo.

    https://hunyuan-sdk-1256237915.cos.ap-guangzhou.myqcloud.com/python.zip
    """
    payload = {
        "app_id": "YOUR_APP_ID",
        "secret_id": "YOUR_SECRET_ID",
        "query_id": "test_query_id_cb5d8156-0ce2-45af-86b4-d02f5c26a142",
        "messages": [
            {
                "role": "user",
                "content": (
                    "You are a helpful assistant that translates English"
                    " to French.Translate this sentence from English to"
                    " French. I love programming."
                ),
            }
        ],
        "temperature": 0.0,
        "top_p": 0.8,
        "stream": 1,
        "timestamp": 1697738378,
        "expired": 1697824778,
    }
    signature = _signature(
        secret_key=SecretStr("YOUR_SECRET_KEY"),
        url="https://hunyuan.cloud.tencent.com/hyllm/v1/chat/completions",
        payload=payload,
    )
    assert signature == "MXBvqNCXyxJWfEyBwk1pYBVnxzo="