docs: Fix ChatBaichuan, QianfanChatEndpoint, ChatSparkLLM, ChatZhipuAI docs (#25265)

- **Description:** Fix the docstrings of several chat models, including:
  - ChatBaichuan
  - QianfanChatEndpoint
  - ChatSparkLLM
  - ChatZhipuAI
Author: maang-h
Date: 2024-08-12 04:23:55 +08:00 (committed by GitHub)
Parent: 43deed2a95
Commit: bc60cddc1b
4 changed files with 10 additions and 9 deletions


@@ -206,7 +206,7 @@ class ChatBaichuan(BaseChatModel):
    Key init args client params:
        api_key: Optional[str]
-           MiniMax API key. If not passed in will be read from env var BAICHUAN_API_KEY.
+           Baichuan API key. If not passed in will be read from env var BAICHUAN_API_KEY.
        base_url: Optional[str]
            Base URL for API requests.
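
The corrected description ties ``api_key`` to the ``BAICHUAN_API_KEY`` environment variable. A minimal usage sketch under that assumption (the temperature value and prompt are illustrative, not taken from the diff):

```python
import os

from langchain_community.chat_models import ChatBaichuan

# Assumption: BAICHUAN_API_KEY is set in the environment, as the corrected
# docstring describes; otherwise pass api_key= explicitly.
os.environ.setdefault("BAICHUAN_API_KEY", "your-api-key")

chat = ChatBaichuan(temperature=0.3)  # temperature is an illustrative value
print(chat.invoke("Hello, please introduce yourself.").content)
```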


@@ -200,7 +200,7 @@ class QianfanChatEndpoint(BaseChatModel):
            ("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"),
            ("human", "我喜欢编程。"),
        ]
-       qianfan_chat.invoke(message)
+       qianfan_chat.invoke(messages)

    .. code-block:: python

@@ -219,6 +219,7 @@ class QianfanChatEndpoint(BaseChatModel):
    .. code-block:: python

+       stream = chat.stream(messages)
        full = next(stream)
        for chunk in stream:
            full += chunk
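
The two hunks fix the typo ``invoke(message)`` and add the previously missing line that creates the stream before it is consumed. A sketch of the corrected flow, assuming the Qianfan credentials are provided via the usual ``QIANFAN_AK`` / ``QIANFAN_SK`` environment variables (the model name is illustrative, not part of the diff):

```python
from langchain_community.chat_models import QianfanChatEndpoint

# Assumption: QIANFAN_AK and QIANFAN_SK are set in the environment.
qianfan_chat = QianfanChatEndpoint(model="ERNIE-3.5-8K")

messages = [
    ("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"),
    ("human", "我喜欢编程。"),
]

# Corrected call: pass the full `messages` list.
qianfan_chat.invoke(messages)

# Streaming as in the second hunk: accumulate chunks into one message.
stream = qianfan_chat.stream(messages)
full = next(stream)
for chunk in stream:
    full += chunk
print(full.content)
```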


@@ -126,9 +126,9 @@ class ChatSparkLLM(BaseChatModel):
            from langchain_community.chat_models import ChatSparkLLM

-           chat = MiniMaxChat(
-               api_key=api_key,
-               api_secret=ak,
+           chat = ChatSparkLLM(
+               api_key="your-api-key",
+               api_secret="your-api-secret",
                model='Spark4.0 Ultra',
                # temperature=...,
                # other params...
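
The fix replaces the stray ``MiniMaxChat`` constructor with ``ChatSparkLLM`` and uses placeholder credentials. A self-contained sketch built from the corrected example; the ``app_id`` line is an assumption about the iFlytek Spark setup and is not part of the diff:

```python
from langchain_community.chat_models import ChatSparkLLM

chat = ChatSparkLLM(
    api_key="your-api-key",          # placeholder, as in the corrected docstring
    api_secret="your-api-secret",    # placeholder, as in the corrected docstring
    app_id="your-app-id",            # assumption: Spark accounts also have an app id
    model="Spark4.0 Ultra",
    # temperature=...,
    # other params...
)
chat.invoke("我喜欢编程。")
```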


@@ -199,7 +199,7 @@ class ChatZhipuAI(BaseChatModel):
    Key init args completion params:
        model: Optional[str]
-           Name of OpenAI model to use.
+           Name of ZhipuAI model to use.
        temperature: float
            Sampling temperature.
        max_tokens: Optional[int]

@@ -207,9 +207,9 @@ class ChatZhipuAI(BaseChatModel):
    Key init args client params:
        api_key: Optional[str]
            ZhipuAI API key. If not passed in will be read from env var ZHIPUAI_API_KEY.
        api_base: Optional[str]
            Base URL for API requests.

    See full list of supported init args and their descriptions in the params section.

@@ -255,7 +255,7 @@ class ChatZhipuAI(BaseChatModel):
    .. code-block:: python

-       stream = llm.stream(messages)
+       stream = zhipuai_chat.stream(messages)
        full = next(stream)
        for chunk in stream:
            full += chunk
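
The last hunk renames the streaming handle to ``zhipuai_chat`` so it matches the variable created earlier in the docstring. A sketch of the corrected streaming example, assuming ``ZHIPUAI_API_KEY`` is set as the docstring describes (the model name and temperature are illustrative):

```python
from langchain_community.chat_models import ChatZhipuAI

# Assumption: ZHIPUAI_API_KEY is set in the environment.
zhipuai_chat = ChatZhipuAI(model="glm-4", temperature=0.5)

messages = [("human", "我喜欢编程。")]

# Streaming with the variable name used in the corrected docstring.
stream = zhipuai_chat.stream(messages)
full = next(stream)
for chunk in stream:
    full += chunk
print(full.content)
```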