community[patch]: standardize init args (#20210)

Related to https://github.com/langchain-ai/langchain/issues/20085. @baskaryan

Standardizes ChatTongyi's init args: dashscope_api_key gains an api_key alias, the pydantic config enables population by field name so the provider-specific spelling keeps working, and a new test covers initialization with both argument names.
parent a7c5e41443, commit 3729bec1a2
@@ -160,7 +160,7 @@ class ChatTongyi(BaseChatModel):
     top_p: float = 0.8
     """Total probability mass of tokens to consider at each step."""
 
-    dashscope_api_key: Optional[SecretStr] = None
+    dashscope_api_key: Optional[SecretStr] = Field(None, alias="api_key")
     """Dashscope api key provide by Alibaba Cloud."""
 
     streaming: bool = False
@@ -169,6 +169,11 @@ class ChatTongyi(BaseChatModel):
     max_retries: int = 10
     """Maximum number of retries to make when generating."""
 
+    class Config:
+        """Configuration for this pydantic object."""
+
+        allow_population_by_field_name = True
+
     @property
     def _llm_type(self) -> str:
         """Return type of llm."""
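The two hunks above work together through pydantic's alias machinery: Field(None, alias="api_key") lets callers pass the standardized api_key argument, while allow_population_by_field_name = True keeps the provider-specific dashscope_api_key spelling working. Below is a minimal sketch of that mechanism, assuming the pydantic v1 API that langchain_core.pydantic_v1 re-exports at this point in the codebase; DemoModel is a made-up stand-in for ChatTongyi's field declaration, not part of the patch.

from typing import Optional

from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr


class DemoModel(BaseModel):
    """Hypothetical model mirroring the field declaration in the diff above."""

    dashscope_api_key: Optional[SecretStr] = Field(None, alias="api_key")

    class Config:
        # Accept the declared field name in addition to its alias.
        allow_population_by_field_name = True


# Both spellings populate the same field; SecretStr hides the raw value in reprs.
assert DemoModel(api_key="xyz").dashscope_api_key.get_secret_value() == "xyz"
assert DemoModel(dashscope_api_key="xyz").dashscope_api_key.get_secret_value() == "xyz"

The remaining hunks touch the Tongyi chat model tests, adding an initialization test that exercises both argument spellings.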
@@ -1,4 +1,5 @@
 """Test Alibaba Tongyi Chat Model."""
+from typing import cast
 
 from langchain_core.callbacks import CallbackManager
 from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
@@ -10,6 +11,16 @@ from langchain_community.chat_models.tongyi import ChatTongyi
 from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
 
 
+def test_initialization() -> None:
+    """Test chat model initialization."""
+    for model in [
+        ChatTongyi(model_name="qwen-turbo", api_key="xyz"),
+        ChatTongyi(model="qwen-turbo", dashscope_api_key="xyz"),
+    ]:
+        assert model.model_name == "qwen-turbo"
+        assert cast(SecretStr, model.dashscope_api_key).get_secret_value() == "xyz"
+
+
 def test_api_key_is_string() -> None:
     llm = ChatTongyi(dashscope_api_key="secret-api-key")
     assert isinstance(llm.dashscope_api_key, SecretStr)
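For completeness, a hedged usage sketch of the end-user effect after this patch. The "xyz" key is a placeholder, the combination of model and api_key in one call is inferred from the test above rather than shown verbatim in it, and constructing ChatTongyi assumes the dashscope SDK is installed.

from langchain_community.chat_models.tongyi import ChatTongyi

# Standardized argument names (api_key aliases dashscope_api_key);
# "xyz" is a placeholder and requests will fail until a real key is supplied.
chat = ChatTongyi(model="qwen-turbo", api_key="xyz")

# The provider-specific spelling keeps working thanks to
# allow_population_by_field_name in the class Config.
legacy = ChatTongyi(model="qwen-turbo", dashscope_api_key="xyz")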