docs: update baidu_qianfan_endpoint.ipynb doc (#15940)

- **Description:** Updated the docs for the chat integration module
baidu_qianfan_endpoint.ipynb
  - **Issue:** #15664
  - **Dependencies:** N/A
This commit is contained in:
Bigtable123 2024-01-16 03:13:21 +08:00 committed by GitHub
parent 21e0df937f
commit 7306032dcf
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -53,9 +53,16 @@
"- AquilaChat-7B" "- AquilaChat-7B"
] ]
}, },
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Set up"
]
},
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 1,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
@ -65,83 +72,105 @@
"from langchain_community.chat_models import QianfanChatEndpoint\n", "from langchain_community.chat_models import QianfanChatEndpoint\n",
"from langchain_core.language_models.chat_models import HumanMessage\n", "from langchain_core.language_models.chat_models import HumanMessage\n",
"\n", "\n",
"os.environ[\"QIANFAN_AK\"] = \"your_ak\"\n", "os.environ[\"QIANFAN_AK\"] = \"Your_api_key\"\n",
"os.environ[\"QIANFAN_SK\"] = \"your_sk\"\n", "os.environ[\"QIANFAN_SK\"] = \"You_secret_Key\""
"\n", ]
"chat = QianfanChatEndpoint(\n", },
" streaming=True,\n", {
")\n", "cell_type": "markdown",
"res = chat([HumanMessage(content=\"write a funny joke\")])" "metadata": {},
"source": [
"## Usage"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": 6, "execution_count": 2,
"metadata": {}, "metadata": {},
"outputs": [ "outputs": [
{ {
"name": "stderr", "data": {
"output_type": "stream", "text/plain": [
"text": [ "AIMessage(content='您好!请问您需要什么帮助?我将尽力回答您的问题。')"
"[INFO] [09-15 20:00:36] logging.py:55 [t:139698882193216]: requesting llm api endpoint: /chat/eb-instant\n", ]
"[INFO] [09-15 20:00:37] logging.py:55 [t:139698882193216]: async requesting llm api endpoint: /chat/eb-instant\n" },
] "execution_count": 2,
}, "metadata": {},
"output_type": "execute_result"
}
],
"source": [
"chat = QianfanChatEndpoint(streaming=True)\n",
"messages = [HumanMessage(content=\"Hello\")]\n",
"chat.invoke(messages)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"AIMessage(content='您好!有什么我可以帮助您的吗?')"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"await chat.ainvoke(messages)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[AIMessage(content='您好!有什么我可以帮助您的吗?')]"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"chat.batch([messages])"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Streaming"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{ {
"name": "stdout", "name": "stdout",
"output_type": "stream", "output_type": "stream",
"text": [ "text": [
"chat resp: content='您好,您似乎输入' additional_kwargs={} example=False\n", "您好!有什么我可以帮助您的吗?\n"
"chat resp: content='了一个话题标签,请问需要我帮您找到什么资料或者帮助您解答什么问题吗?' additional_kwargs={} example=False\n",
"chat resp: content='' additional_kwargs={} example=False\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[INFO] [09-15 20:00:39] logging.py:55 [t:139698882193216]: async requesting llm api endpoint: /chat/eb-instant\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"generations=[[ChatGeneration(text=\"The sea is a vast expanse of water that covers much of the Earth's surface. It is a source of travel, trade, and entertainment, and is also a place of scientific exploration and marine conservation. The sea is an important part of our world, and we should cherish and protect it.\", generation_info={'finish_reason': 'finished'}, message=AIMessage(content=\"The sea is a vast expanse of water that covers much of the Earth's surface. It is a source of travel, trade, and entertainment, and is also a place of scientific exploration and marine conservation. The sea is an important part of our world, and we should cherish and protect it.\", additional_kwargs={}, example=False))]] llm_output={} run=[RunInfo(run_id=UUID('d48160a6-5960-4c1d-8a0e-90e6b51a209b'))]\n",
"astream content='The sea is a vast' additional_kwargs={} example=False\n",
"astream content=' expanse of water, a place of mystery and adventure. It is the source of many cultures and civilizations, and a center of trade and exploration. The sea is also a source of life and beauty, with its unique marine life and diverse' additional_kwargs={} example=False\n",
"astream content=' coral reefs. Whether you are swimming, diving, or just watching the sea, it is a place that captivates the imagination and transforms the spirit.' additional_kwargs={} example=False\n"
] ]
} }
], ],
"source": [ "source": [
"from langchain.schema import HumanMessage\n", "try:\n",
"from langchain_community.chat_models import QianfanChatEndpoint\n", " for chunk in chat.stream(messages):\n",
"\n", " print(chunk.content, end=\"\", flush=True)\n",
"chatLLM = QianfanChatEndpoint()\n", "except TypeError as e:\n",
"res = chatLLM.stream([HumanMessage(content=\"hi\")], streaming=True)\n", " print(\"\")"
"for r in res:\n",
" print(\"chat resp:\", r)\n",
"\n",
"\n",
"async def run_aio_generate():\n",
" resp = await chatLLM.agenerate(\n",
" messages=[[HumanMessage(content=\"write a 20 words sentence about sea.\")]]\n",
" )\n",
" print(resp)\n",
"\n",
"\n",
"await run_aio_generate()\n",
"\n",
"\n",
"async def run_aio_stream():\n",
" async for res in chatLLM.astream(\n",
" [HumanMessage(content=\"write a 20 words sentence about sea.\")]\n",
" ):\n",
" print(\"astream\", res)\n",
"\n",
"\n",
"await run_aio_stream()"
] ]
}, },
{ {
@ -151,39 +180,36 @@
"source": [ "source": [
"## Use different models in Qianfan\n", "## Use different models in Qianfan\n",
"\n", "\n",
"In the case you want to deploy your own model based on Ernie Bot or third-party open-source model, you could follow these steps:\n", "The default model is ERNIE-Bot-turbo, in the case you want to deploy your own model based on Ernie Bot or third-party open-source model, you could follow these steps:\n",
"\n", "\n",
"- 1. Optional, if the model are included in the default models, skip itDeploy your model in Qianfan Console, get your own customized deploy endpoint.\n", "1. (Optional, if the model are included in the default models, skip it) Deploy your model in Qianfan Console, get your own customized deploy endpoint.\n",
"- 2. Set up the field called `endpoint` in the initialization:" "2. Set up the field called `endpoint` in the initialization:"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": 7, "execution_count": 6,
"metadata": {}, "metadata": {},
"outputs": [ "outputs": [
{ {
"name": "stderr", "data": {
"output_type": "stream", "text/plain": [
"text": [ "AIMessage(content='Hello可以回答问题了我会竭尽全力为您解答请问有什么问题吗')"
"[INFO] [09-15 20:00:50] logging.py:55 [t:139698882193216]: requesting llm api endpoint: /chat/bloomz_7b1\n" ]
] },
}, "execution_count": 6,
{ "metadata": {},
"name": "stdout", "output_type": "execute_result"
"output_type": "stream",
"text": [
"content='你好!很高兴见到你。' additional_kwargs={} example=False\n"
]
} }
], ],
"source": [ "source": [
"chatBloom = QianfanChatEndpoint(\n", "chatBot = QianfanChatEndpoint(\n",
" streaming=True,\n", " streaming=True,\n",
" model=\"BLOOMZ-7B\",\n", " model=\"ERNIE-Bot\",\n",
")\n", ")\n",
"res = chatBloom([HumanMessage(content=\"hi\")])\n", "\n",
"print(res)" "messages = [HumanMessage(content=\"Hello\")]\n",
"chatBot.invoke(messages)"
] ]
}, },
{ {
@ -202,35 +228,25 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": 8, "execution_count": 7,
"metadata": {}, "metadata": {},
"outputs": [ "outputs": [
{ {
"name": "stderr", "data": {
"output_type": "stream", "text/plain": [
"text": [ "AIMessage(content='您好!有什么我可以帮助您的吗?')"
"[INFO] [09-15 20:00:57] logging.py:55 [t:139698882193216]: requesting llm api endpoint: /chat/eb-instant\n" ]
] },
}, "execution_count": 7,
{ "metadata": {},
"name": "stdout", "output_type": "execute_result"
"output_type": "stream",
"text": [
"content='您好,您似乎输入' additional_kwargs={} example=False\n",
"content='了一个文本字符串,但并没有给出具体的问题或场景。' additional_kwargs={} example=False\n",
"content='如果您能提供更多信息,我可以更好地回答您的问题。' additional_kwargs={} example=False\n",
"content='' additional_kwargs={} example=False\n"
]
} }
], ],
"source": [ "source": [
"res = chat.stream(\n", "chat.invoke(\n",
" [HumanMessage(content=\"hi\")],\n", " [HumanMessage(content=\"Hello\")],\n",
" **{\"top_p\": 0.4, \"temperature\": 0.1, \"penalty_score\": 1},\n", " **{\"top_p\": 0.4, \"temperature\": 0.1, \"penalty_score\": 1},\n",
")\n", ")"
"\n",
"for r in res:\n",
" print(r)"
] ]
} }
], ],
@ -250,7 +266,7 @@
"name": "python", "name": "python",
"nbconvert_exporter": "python", "nbconvert_exporter": "python",
"pygments_lexer": "ipython3", "pygments_lexer": "ipython3",
"version": "3.11.5" "version": "3.9.18"
}, },
"vscode": { "vscode": {
"interpreter": { "interpreter": {