delete the default model value from langchain and discard the need fo… (#24915)

- description: I remove the requirement that `QIANFAN_AK` must exist, and drop the
default model name that langchain hard-codes, because there is
already a default model name in the underlying `qianfan` SDK powering the langchain
component.

---------

Co-authored-by: Chester Curme <chester.curme@gmail.com>
This commit is contained in:
Dobiichi-Origami 2024-08-06 22:11:05 +08:00 committed by GitHub
parent 293a4a78de
commit 061ed250f6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 15 additions and 12 deletions

View File

@ -346,7 +346,9 @@ class QianfanChatEndpoint(BaseChatModel):
client: Any #: :meta private: client: Any #: :meta private:
qianfan_ak: SecretStr = Field(alias="api_key") # It could be empty due to the use of Console API
# And they're not list here
qianfan_ak: Optional[SecretStr] = Field(default=None, alias="api_key")
"""Qianfan API KEY""" """Qianfan API KEY"""
qianfan_sk: Optional[SecretStr] = Field(default=None, alias="secret_key") qianfan_sk: Optional[SecretStr] = Field(default=None, alias="secret_key")
"""Qianfan SECRET KEY""" """Qianfan SECRET KEY"""
@ -365,13 +367,13 @@ class QianfanChatEndpoint(BaseChatModel):
In the case of other model, passing these params will not affect the result. In the case of other model, passing these params will not affect the result.
""" """
model: str = "ERNIE-Lite-8K" model: Optional[str] = Field(default=None)
"""Model name. """Model name.
you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu
preset models are mapping to an endpoint. preset models are mapping to an endpoint.
`model` will be ignored if `endpoint` is set. `model` will be ignored if `endpoint` is set.
Default is ERNIE-Lite-8K. Default is set by `qianfan` SDK, not here
""" """
endpoint: Optional[str] = None endpoint: Optional[str] = None
@ -386,16 +388,12 @@ class QianfanChatEndpoint(BaseChatModel):
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
values["qianfan_ak"] = convert_to_secret_str( values["qianfan_ak"] = convert_to_secret_str(
get_from_dict_or_env( get_from_dict_or_env(
values, values, ["qianfan_ak", "api_key"], "QIANFAN_AK", default=""
["qianfan_ak", "api_key"],
"QIANFAN_AK",
) )
) )
values["qianfan_sk"] = convert_to_secret_str( values["qianfan_sk"] = convert_to_secret_str(
get_from_dict_or_env( get_from_dict_or_env(
values, values, ["qianfan_sk", "secret_key"], "QIANFAN_SK", default=""
["qianfan_sk", "secret_key"],
"QIANFAN_SK",
) )
) )

View File

@ -55,7 +55,7 @@ class QianfanEmbeddingsEndpoint(BaseModel, Embeddings):
chunk_size: int = 16 chunk_size: int = 16
"""Chunk size when multiple texts are input""" """Chunk size when multiple texts are input"""
model: str = "Embedding-V1" model: Optional[str] = Field(default=None)
"""Model name """Model name
you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu

View File

@ -55,12 +55,14 @@ class QianfanLLMEndpoint(LLM):
streaming: Optional[bool] = False streaming: Optional[bool] = False
"""Whether to stream the results or not.""" """Whether to stream the results or not."""
model: str = "ERNIE-Bot-turbo" model: Optional[str] = Field(default=None)
"""Model name. """Model name.
you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu
preset models are mapping to an endpoint. preset models are mapping to an endpoint.
`model` will be ignored if `endpoint` is set `model` will be ignored if `endpoint` is set
Default is set by `qianfan` SDK, not here
""" """
endpoint: Optional[str] = None endpoint: Optional[str] = None

View File

@ -306,7 +306,10 @@ def test_functions_call() -> None:
def test_rate_limit() -> None: def test_rate_limit() -> None:
chat = QianfanChatEndpoint(model="ERNIE-Bot", init_kwargs={"query_per_second": 2}) # type: ignore[call-arg] chat = QianfanChatEndpoint(model="ERNIE-Bot", init_kwargs={"query_per_second": 2}) # type: ignore[call-arg]
assert chat.client._client._rate_limiter._sync_limiter._query_per_second == 2 assert (
chat.client._client._rate_limiter._internal_qps_rate_limiter._sync_limiter._query_per_second
== 1.8
)
responses = chat.batch( responses = chat.batch(
[ [
[HumanMessage(content="Hello")], [HumanMessage(content="Hello")],