docs[patch]: update chatbot tutorial and migration guide (#26780)

This commit is contained in:
ccurme 2024-09-24 10:18:48 -04:00 committed by GitHub
parent e1e4f88b3e
commit a7aad27cba
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 447 additions and 484 deletions

File diff suppressed because it is too large Load Diff

View File

@ -9,13 +9,13 @@
"\n",
"[`ConversationChain`](https://python.langchain.com/api_reference/langchain/chains/langchain.chains.conversation.base.ConversationChain.html) incorporated a memory of previous messages to sustain a stateful conversation.\n",
"\n",
"Some advantages of switching to the LCEL implementation are:\n",
"Some advantages of switching to the LangGraph implementation are:\n",
"\n",
"- Innate support for threads/separate sessions. To make this work with `ConversationChain`, you'd need to instantiate a separate memory class outside the chain.\n",
"- More explicit parameters. `ConversationChain` contains a hidden default prompt, which can cause confusion.\n",
"- Streaming support. `ConversationChain` only supports streaming via callbacks.\n",
"\n",
"`RunnableWithMessageHistory` implements sessions via configuration parameters. It should be instantiated with a callable that returns a [chat message history](https://python.langchain.com/api_reference/core/chat_history/langchain_core.chat_history.BaseChatMessageHistory.html). By default, it expects this function to take a single argument `session_id`."
"LangGraph's [checkpointing](https://langchain-ai.github.io/langgraph/how-tos/persistence/) system supports multiple threads or sessions, which can be specified via the `\"thread_id\"` key in its configuration parameters."
]
},
{
@ -61,9 +61,9 @@
{
"data": {
"text/plain": [
"{'input': 'how are you?',\n",
"{'input': \"I'm Bob, how are you?\",\n",
" 'history': '',\n",
" 'response': \"Arr matey, I be doin' well on the high seas, plunderin' and pillagin' as usual. How be ye?\"}"
" 'response': \"Arrr matey, I be a pirate sailin' the high seas. What be yer business with me?\"}"
]
},
"execution_count": 2,
@ -93,7 +93,30 @@
" prompt=prompt,\n",
")\n",
"\n",
"chain({\"input\": \"how are you?\"})"
"chain({\"input\": \"I'm Bob, how are you?\"})"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "53f2c723-178f-470a-8147-54e7cb982211",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'input': 'What is my name?',\n",
" 'history': \"Human: I'm Bob, how are you?\\nAI: Arrr matey, I be a pirate sailin' the high seas. What be yer business with me?\",\n",
" 'response': 'Your name be Bob, matey.'}"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"chain({\"input\": \"What is my name?\"})"
]
},
{
@ -103,111 +126,110 @@
"source": [
"</details>\n",
"\n",
"## LCEL\n",
"## LangGraph\n",
"\n",
"<details open>"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "666c92a0-b555-4418-a465-6490c1b92570",
"execution_count": 4,
"id": "a59b910c-0d02-41aa-bc99-441f11989cf8",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"\"Arr, me matey! I be doin' well, sailin' the high seas and searchin' for treasure. How be ye?\""
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"outputs": [],
"source": [
"from langchain_core.chat_history import InMemoryChatMessageHistory\n",
"from langchain_core.output_parsers import StrOutputParser\n",
"from langchain_core.prompts import ChatPromptTemplate\n",
"from langchain_core.runnables.history import RunnableWithMessageHistory\n",
"import uuid\n",
"\n",
"from langchain_openai import ChatOpenAI\n",
"from langgraph.checkpoint.memory import MemorySaver\n",
"from langgraph.graph import START, MessagesState, StateGraph\n",
"\n",
"prompt = ChatPromptTemplate.from_messages(\n",
" [\n",
" (\"system\", \"You are a pirate. Answer the following questions as best you can.\"),\n",
" (\"placeholder\", \"{chat_history}\"),\n",
" (\"human\", \"{input}\"),\n",
" ]\n",
")\n",
"model = ChatOpenAI(model=\"gpt-4o-mini\")\n",
"\n",
"history = InMemoryChatMessageHistory()\n",
"# Define a new graph\n",
"workflow = StateGraph(state_schema=MessagesState)\n",
"\n",
"\n",
"def get_history():\n",
" return history\n",
"# Define the function that calls the model\n",
"def call_model(state: MessagesState):\n",
" response = model.invoke(state[\"messages\"])\n",
" return {\"messages\": response}\n",
"\n",
"\n",
"chain = prompt | ChatOpenAI() | StrOutputParser()\n",
"# Define the two nodes we will cycle between\n",
"workflow.add_edge(START, \"model\")\n",
"workflow.add_node(\"model\", call_model)\n",
"\n",
"wrapped_chain = RunnableWithMessageHistory(\n",
" chain,\n",
" get_history,\n",
" history_messages_key=\"chat_history\",\n",
")\n",
"# Add memory\n",
"memory = MemorySaver()\n",
"app = workflow.compile(checkpointer=memory)\n",
"\n",
"wrapped_chain.invoke({\"input\": \"how are you?\"})"
]
},
{
"cell_type": "markdown",
"id": "6b386ce6-895e-442c-88f3-7bec0ab9f401",
"metadata": {},
"source": [
"The above example uses the same `history` for all sessions. The example below shows how to use a different chat history for each session."
"\n",
"# The thread id is a unique key that identifies\n",
"# this particular conversation.\n",
"# We'll just generate a random uuid here.\n",
"thread_id = uuid.uuid4()\n",
"config = {\"configurable\": {\"thread_id\": thread_id}}"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "96152263-98d7-4e06-8c73-d0c0abf3e8e9",
"execution_count": 5,
"id": "3a9df4bb-e804-4373-9a15-a29dc0371595",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Ahoy there, me hearty! What can this old pirate do for ye today?'"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
"name": "stdout",
"output_type": "stream",
"text": [
"================================\u001b[1m Human Message \u001b[0m=================================\n",
"\n",
"I'm Bob, how are you?\n",
"==================================\u001b[1m Ai Message \u001b[0m==================================\n",
"\n",
"Ahoy, Bob! I be feelin' as lively as a ship in full sail! How be ye on this fine day?\n"
]
}
],
"source": [
"from langchain_core.chat_history import BaseChatMessageHistory\n",
"from langchain_core.runnables.history import RunnableWithMessageHistory\n",
"query = \"I'm Bob, how are you?\"\n",
"\n",
"store = {}\n",
"input_messages = [\n",
" {\n",
" \"role\": \"system\",\n",
" \"content\": \"You are a pirate. Answer the following questions as best you can.\",\n",
" },\n",
" {\"role\": \"user\", \"content\": query},\n",
"]\n",
"for event in app.stream({\"messages\": input_messages}, config, stream_mode=\"values\"):\n",
" event[\"messages\"][-1].pretty_print()"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "d3f77e69-fa3d-496c-968c-86371e1e8cf1",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"================================\u001b[1m Human Message \u001b[0m=================================\n",
"\n",
"What is my name?\n",
"==================================\u001b[1m Ai Message \u001b[0m==================================\n",
"\n",
"Ye be callin' yerself Bob, I reckon! A fine name for a swashbuckler like yerself!\n"
]
}
],
"source": [
"query = \"What is my name?\"\n",
"\n",
"\n",
"def get_session_history(session_id: str) -> BaseChatMessageHistory:\n",
" if session_id not in store:\n",
" store[session_id] = InMemoryChatMessageHistory()\n",
" return store[session_id]\n",
"\n",
"\n",
"chain = prompt | ChatOpenAI() | StrOutputParser()\n",
"\n",
"wrapped_chain = RunnableWithMessageHistory(\n",
" chain,\n",
" get_session_history,\n",
" history_messages_key=\"chat_history\",\n",
")\n",
"\n",
"wrapped_chain.invoke(\n",
" {\"input\": \"Hello!\"},\n",
" config={\"configurable\": {\"session_id\": \"abc123\"}},\n",
")"
"input_messages = [{\"role\": \"user\", \"content\": query}]\n",
"for event in app.stream({\"messages\": input_messages}, config, stream_mode=\"values\"):\n",
" event[\"messages\"][-1].pretty_print()"
]
},
{