diff --git a/docs/extras/guides/expression_language/cookbook.ipynb b/docs/extras/guides/expression_language/cookbook.ipynb
index 19dd9b55547..3d46843b335 100644
--- a/docs/extras/guides/expression_language/cookbook.ipynb
+++ b/docs/extras/guides/expression_language/cookbook.ipynb
@@ -22,19 +22,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": 10,
    "id": "466b65b3",
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/Users/harrisonchase/.pyenv/versions/3.9.1/envs/langchain/lib/python3.9/site-packages/deeplake/util/check_latest_version.py:32: UserWarning: A newer version of deeplake (3.6.14) is available. It's recommended that you update to the latest version using `pip install -U deeplake`.\n",
-      " warnings.warn(\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "from langchain.prompts import ChatPromptTemplate\n",
     "from langchain.chat_models import ChatOpenAI"
@@ -42,7 +33,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 11,
    "id": "3c634ef0",
    "metadata": {},
    "outputs": [],
@@ -211,7 +202,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 12,
    "id": "f799664d",
    "metadata": {},
    "outputs": [],
@@ -356,7 +347,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 19,
+   "execution_count": 13,
    "id": "5d3d8ffe",
    "metadata": {},
    "outputs": [],
@@ -377,7 +368,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 20,
+   "execution_count": 14,
    "id": "33be32af",
    "metadata": {},
    "outputs": [],
@@ -389,7 +380,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": 15,
    "id": "df3f3fa2",
    "metadata": {},
    "outputs": [],
@@ -401,7 +392,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 22,
+   "execution_count": 16,
    "id": "bfc47ec1",
    "metadata": {},
    "outputs": [],
@@ -416,7 +407,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 24,
+   "execution_count": 17,
    "id": "eae31755",
    "metadata": {},
    "outputs": [],
@@ -431,7 +422,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 25,
+   "execution_count": 18,
    "id": "f3040b0c",
    "metadata": {
     "scrolled": false
@@ -450,7 +441,7 @@
        "'Harrison worked at Kensho.'"
       ]
      },
-     "execution_count": 25,
+     "execution_count": 18,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -461,7 +452,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 27,
+   "execution_count": 19,
    "id": "e1d20c7c",
    "metadata": {},
    "outputs": [],
@@ -484,7 +475,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 28,
+   "execution_count": 20,
    "id": "7ee8b2d4",
    "metadata": {
     "scrolled": false
@@ -503,7 +494,7 @@
        "'Harrison ha lavorato a Kensho.'"
       ]
      },
-     "execution_count": 28,
+     "execution_count": 20,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -512,6 +503,170 @@
     "chain.invoke({\"question\": \"where did harrison work\", \"language\": \"italian\"})"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "id": "f007669c",
+   "metadata": {},
+   "source": [
+    "## Conversational Retrieval Chain\n",
+    "\n",
+    "We can easily add in conversation history. This primarily means adding in chat_message_history"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 66,
+   "id": "3f30c348",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langchain.schema.runnable import RunnableMap\n",
+    "from langchain.schema import format_document"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "id": "64ab1dbf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langchain.prompts.prompt import PromptTemplate\n",
+    "\n",
+    "_template = \"\"\"Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.\n",
+    "\n",
+    "Chat History:\n",
+    "{chat_history}\n",
+    "Follow Up Input: {question}\n",
+    "Standalone question:\"\"\"\n",
+    "CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "id": "7d628c97",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "template = \"\"\"Answer the question based only on the following context:\n",
+    "{context}\n",
+    "\n",
+    "Question: {question}\n",
+    "\"\"\"\n",
+    "ANSWER_PROMPT = ChatPromptTemplate.from_template(template)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 68,
+   "id": "f60a5d0f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "DEFAULT_DOCUMENT_PROMPT = PromptTemplate.from_template(template=\"{page_content}\")\n",
+    "def _combine_documents(docs, document_prompt = DEFAULT_DOCUMENT_PROMPT, document_separator=\"\\n\\n\"):\n",
+    "    doc_strings = [format_document(doc, document_prompt) for doc in docs]\n",
+    "    return document_separator.join(doc_strings)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 69,
+   "id": "7d007db6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from typing import Tuple, List\n",
+    "def _format_chat_history(chat_history: List[Tuple]) -> str:\n",
+    "    buffer = \"\"\n",
+    "    for dialogue_turn in chat_history:\n",
+    "        human = \"Human: \" + dialogue_turn[0]\n",
+    "        ai = \"Assistant: \" + dialogue_turn[1]\n",
+    "        buffer += \"\\n\" + \"\\n\".join([human, ai])\n",
+    "    return buffer"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 70,
+   "id": "5c32cc89",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "conversational_qa_chain = RunnableMap({\n",
+    "    \"standalone_question\": {\n",
+    "        \"question\": lambda x: x[\"question\"],\n",
+    "        \"chat_history\": lambda x: _format_chat_history(x['chat_history'])\n",
+    "    } | CONDENSE_QUESTION_PROMPT | ChatOpenAI(temperature=0) | StrOutputParser(),\n",
+    "}) | {\n",
+    "    \"context\": itemgetter(\"standalone_question\") | retriever | _combine_documents,\n",
+    "    \"question\": lambda x: x[\"standalone_question\"]\n",
+    "} | ANSWER_PROMPT | ChatOpenAI()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 71,
+   "id": "135c8205",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Number of requested results 4 is greater than number of elements in index 1, updating n_results = 1\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "AIMessage(content='Harrison was employed at Kensho.', additional_kwargs={}, example=False)"
+      ]
+     },
+     "execution_count": 71,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "conversational_qa_chain.invoke({\n",
+    "    \"question\": \"where did harrison work?\",\n",
+    "    \"chat_history\": [],\n",
+    "})"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 62,
+   "id": "424e7e7a",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Number of requested results 4 is greater than number of elements in index 1, updating n_results = 1\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "AIMessage(content='Harrison worked at Kensho.', additional_kwargs={}, example=False)"
+      ]
+     },
+     "execution_count": 62,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "conversational_qa_chain.invoke({\n",
+    "    \"question\": \"where did he work?\",\n",
+    "    \"chat_history\": [(\"Who wrote this notebook?\", \"Harrison\")],\n",
+    "})"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "0f2bf8d3",