From e4e28a6ff593dc9a637493bcbc43bc4a07216578 Mon Sep 17 00:00:00 2001
From: maang-h <55082429+maang-h@users.noreply.github.com>
Date: Wed, 3 Jul 2024 01:23:32 +0800
Subject: [PATCH] community[patch]: Fix MiniMaxChat validate_environment error
 (#23770)

- **Description:** Fix several issues in MiniMaxChat
  - Fix the `minimax_api_host` not in `values` error
  - Stop reading `minimax_group_id` from environment variables; `minimax_group_id` is no longer used by MiniMaxChat
  - Invoke the callback prior to yielding the token (issue #16913)
---
 .../chat_models/minimax.py                    | 21 ++++++++++++++-------
 .../chat_models/test_minimax.py               | 21 +++++++++++++++++++
 2 files changed, 35 insertions(+), 7 deletions(-)
 create mode 100644 libs/community/tests/integration_tests/chat_models/test_minimax.py

diff --git a/libs/community/langchain_community/chat_models/minimax.py b/libs/community/langchain_community/chat_models/minimax.py
index a2cf559c418..339a4d83679 100644
--- a/libs/community/langchain_community/chat_models/minimax.py
+++ b/libs/community/langchain_community/chat_models/minimax.py
@@ -186,15 +186,20 @@ class MiniMaxChat(BaseChatModel):
                 "MINIMAX_API_KEY",
             )
         )
-        values["minimax_group_id"] = get_from_dict_or_env(
-            values, ["minimax_group_id", "group_id"], "MINIMAX_GROUP_ID"
-        )
+
+        default_values = {
+            name: field.default
+            for name, field in cls.__fields__.items()
+            if field.default is not None
+        }
+        default_values.update(values)
+
         # Get custom api url from environment.
         values["minimax_api_host"] = get_from_dict_or_env(
             values,
-            "minimax_api_host",
+            ["minimax_api_host", "base_url"],
             "MINIMAX_API_HOST",
-            values["minimax_api_host"],
+            default_values["minimax_api_host"],
         )
         return values
 
@@ -316,9 +321,10 @@ class MiniMaxChat(BaseChatModel):
                         chunk = ChatGenerationChunk(
                             message=chunk, generation_info=generation_info
                         )
-                        yield chunk
                         if run_manager:
                             run_manager.on_llm_new_token(chunk.text, chunk=chunk)
+                        yield chunk
+
                         if finish_reason is not None:
                             break
 
@@ -394,8 +400,9 @@ class MiniMaxChat(BaseChatModel):
                         chunk = ChatGenerationChunk(
                             message=chunk, generation_info=generation_info
                         )
-                        yield chunk
                         if run_manager:
                             await run_manager.on_llm_new_token(chunk.text, chunk=chunk)
+                        yield chunk
+
                         if finish_reason is not None:
                             break
diff --git a/libs/community/tests/integration_tests/chat_models/test_minimax.py b/libs/community/tests/integration_tests/chat_models/test_minimax.py
new file mode 100644
index 00000000000..339035b8cfc
--- /dev/null
+++ b/libs/community/tests/integration_tests/chat_models/test_minimax.py
@@ -0,0 +1,21 @@
+import os
+
+from langchain_core.messages import AIMessage
+
+from langchain_community.chat_models import MiniMaxChat
+
+
+def test_chat_minimax_not_group_id() -> None:
+    if "MINIMAX_GROUP_ID" in os.environ:
+        del os.environ["MINIMAX_GROUP_ID"]
+    chat = MiniMaxChat()  # type: ignore[call-arg]
+    response = chat.invoke("你好呀")
+    assert isinstance(response, AIMessage)
+    assert isinstance(response.content, str)
+
+
+def test_chat_minimax_with_stream() -> None:
+    chat = MiniMaxChat()  # type: ignore[call-arg]
+    for chunk in chat.stream("你好呀"):
+        assert isinstance(chunk, AIMessage)
+        assert isinstance(chunk.content, str)
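
A minimal sketch (not part of the patch) of how the reordered callback in _stream can be observed from user code. It assumes MINIMAX_API_KEY is set in the environment; TokenLogger is a hypothetical handler name used only for illustration.

from langchain_core.callbacks import BaseCallbackHandler

from langchain_community.chat_models import MiniMaxChat


class TokenLogger(BaseCallbackHandler):
    """Hypothetical handler that records each streamed token."""

    def __init__(self) -> None:
        self.tokens: list = []

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        # With the callback invoked before the yield, this runs before
        # the corresponding chunk reaches the consumer of chat.stream().
        self.tokens.append(token)


handler = TokenLogger()
chat = MiniMaxChat(callbacks=[handler])  # type: ignore[call-arg]

for chunk in chat.stream("你好呀"):
    # By the time a chunk arrives here, the handler has already seen its text.
    print(chunk.content, end="", flush=True)

print()
print(f"Callback observed {len(handler.tokens)} tokens")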