From 58da6e0d476d3b3554c30d68c847ce93b5194924 Mon Sep 17 00:00:00 2001
From: Erick Friis
Date: Thu, 9 Nov 2023 09:54:00 -0800
Subject: [PATCH] Multimodal rag traces (#13140)

---
 cookbook/Multi_modal_RAG.ipynb | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/cookbook/Multi_modal_RAG.ipynb b/cookbook/Multi_modal_RAG.ipynb
index e48a7e01a66..3a2bb1206c6 100644
--- a/cookbook/Multi_modal_RAG.ipynb
+++ b/cookbook/Multi_modal_RAG.ipynb
@@ -12,6 +12,8 @@
    "source": [
     "## Multi-modal RAG\n",
     "\n",
+    "[See Trace of Option 3](https://smith.langchain.com/public/db0441a8-2c17-4070-bdf7-45d4fdf8f517/r/80cb0f89-1766-4caf-8959-fc43ec4b071c)\n",
+    "\n",
     "Many documents contain a mixture of content types, including text and images. \n",
     "\n",
     "Yet, information captured in images is lost in most RAG applications.\n",
@@ -202,7 +204,13 @@
    "source": [
     "### Image summaries \n",
     "\n",
-    "We will use [GPT4-V](https://openai.com/research/gpt-4v-system-card) to produce the image summaries."
+    "We will use [GPT-4V](https://openai.com/research/gpt-4v-system-card) to produce the image summaries.\n",
+    "\n",
+    "See the traces for each of the 5 ingested images here ([1](https://smith.langchain.com/public/f5548212-2e70-4fa8-91d6-c3e7d768d52b/r), \n",
+    "[2](https://smith.langchain.com/public/8b198178-5b83-4960-bbc1-c10516779208/r), \n",
+    "[3](https://smith.langchain.com/public/c4fcbcd5-38fb-462a-9ed1-e90b1d009fa9/r), \n",
+    "[4](https://smith.langchain.com/public/1df53c23-63b8-4f87-b5ae-e9d59b2a54ab/r), \n",
+    "[5](https://smith.langchain.com/public/f93efd6c-f9f6-46c9-b169-29270d33ad63/r))"
    ]
   },
   {
@@ -576,7 +584,7 @@
    "id": "dea241f1-bd11-45cb-bb33-c4e2e8286855",
    "metadata": {},
    "source": [
-    "Here is the [trace](https://smith.langchain.com/public/43d955ff-212f-4043-8d63-986b0e4e4eed/r). "
+    "Here is the [trace](https://smith.langchain.com/public/db0441a8-2c17-4070-bdf7-45d4fdf8f517/r/80cb0f89-1766-4caf-8959-fc43ec4b071c). "
    ]
   }
  ],
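
Note: the trace links added above point at the notebook's image-summary step,
which sends each ingested image to GPT-4V. A minimal sketch of that step,
assuming the notebook's gpt-4-vision-preview setup; summarize_image and the
prompt text are illustrative, not part of this PR:

    import base64

    from langchain.chat_models import ChatOpenAI
    from langchain.schema.messages import HumanMessage


    def summarize_image(image_path: str) -> str:
        # Encode the image as base64 so it can be sent inline to the API.
        with open(image_path, "rb") as f:
            b64 = base64.b64encode(f.read()).decode("utf-8")

        # GPT-4V chat model; max_tokens bounds the summary length.
        chat = ChatOpenAI(model="gpt-4-vision-preview", max_tokens=256)

        # One human message mixing a text prompt with the inline image.
        msg = chat.invoke(
            [
                HumanMessage(
                    content=[
                        {
                            "type": "text",
                            "text": "Summarize this image for retrieval.",
                        },
                        {
                            "type": "image_url",
                            "image_url": {
                                "url": f"data:image/jpeg;base64,{b64}"
                            },
                        },
                    ]
                )
            ]
        )
        return msg.content

Each of the five trace links corresponds to one such call, one per ingested
image.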