diff --git a/docs/docs/integrations/chat/bedrock.ipynb b/docs/docs/integrations/chat/bedrock.ipynb
index 1a74fb273ee..3d7b92e7c98 100644
--- a/docs/docs/integrations/chat/bedrock.ipynb
+++ b/docs/docs/integrations/chat/bedrock.ipynb
@@ -137,6 +137,77 @@
     "for chunk in chat.stream(messages):\n",
     "    print(chunk.content, end=\"\", flush=True)"
    ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "c36575b3",
+   "metadata": {},
+   "source": [
+    "### LLM Caching with OpenSearch Semantic Cache\n",
+    "\n",
+    "Use OpenSearch as a semantic cache to store prompts and responses, and to return the cached response when a new prompt is semantically similar to a previously cached one.\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "375d4e56",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langchain.globals import set_llm_cache\n",
+    "from langchain_aws import BedrockEmbeddings, ChatBedrock\n",
+    "from langchain_community.cache import OpenSearchSemanticCache\n",
+    "from langchain_core.messages import HumanMessage\n",
+    "\n",
+    "bedrock_embeddings = BedrockEmbeddings(\n",
+    "    model_id=\"amazon.titan-embed-text-v1\", region_name=\"us-east-1\"\n",
+    ")\n",
+    "\n",
+    "chat = ChatBedrock(\n",
+    "    model_id=\"anthropic.claude-3-haiku-20240307-v1:0\", model_kwargs={\"temperature\": 0.5}\n",
+    ")\n",
+    "\n",
+    "# Enable the LLM cache. Make sure OpenSearch is set up and running, and update the URL accordingly.\n",
+    "set_llm_cache(\n",
+    "    OpenSearchSemanticCache(\n",
+    "        opensearch_url=\"http://localhost:9200\", embedding=bedrock_embeddings\n",
+    "    )\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bb5d25bb",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%%time\n",
+    "# The first time, the prompt is not yet in the cache, so this call should take longer\n",
+    "messages = [HumanMessage(content=\"tell me about Amazon Bedrock\")]\n",
+    "response_text = chat.invoke(messages)\n",
+    "\n",
+    "print(response_text)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6cfb3086",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%%time\n",
+    "# The second time, while not a direct hit, the question is semantically similar to the original question,\n",
+    "# so it uses the cached result!\n",
+    "\n",
+    "messages = [HumanMessage(content=\"what is amazon bedrock\")]\n",
+    "response_text = chat.invoke(messages)\n",
+    "\n",
+    "print(response_text)"
+   ]
   }
  ],
  "metadata": {
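
A note for anyone adapting these cells outside a notebook: `%%time` is the only evidence of a cache hit in the cells above. Below is a minimal sketch of the same flow as a plain script that times a cold call against a semantically similar warm call. The `timed_invoke` helper is mine, and the `score_threshold` keyword (which I take to control how semantically close a new prompt must be to a cached one to count as a hit) is an assumption about the `OpenSearchSemanticCache` constructor, not something this diff documents.

```python
import time

from langchain.globals import set_llm_cache
from langchain_aws import BedrockEmbeddings, ChatBedrock
from langchain_community.cache import OpenSearchSemanticCache
from langchain_core.messages import HumanMessage

bedrock_embeddings = BedrockEmbeddings(
    model_id="amazon.titan-embed-text-v1", region_name="us-east-1"
)
chat = ChatBedrock(
    model_id="anthropic.claude-3-haiku-20240307-v1:0", model_kwargs={"temperature": 0.5}
)

# score_threshold is assumed, not taken from this diff: it should gate how similar
# a new prompt must be to a cached entry before the cache answers for the model.
set_llm_cache(
    OpenSearchSemanticCache(
        opensearch_url="http://localhost:9200",
        embedding=bedrock_embeddings,
        score_threshold=0.2,
    )
)


def timed_invoke(prompt: str) -> float:
    """Invoke the chat model and return the wall-clock latency in seconds."""
    start = time.perf_counter()
    chat.invoke([HumanMessage(content=prompt)])
    return time.perf_counter() - start


cold = timed_invoke("tell me about Amazon Bedrock")  # cache miss: calls Bedrock
warm = timed_invoke("what is amazon bedrock")  # semantic hit: served from the cache
print(f"cold: {cold:.2f}s  warm: {warm:.2f}s")  # the warm call should be much faster
```

If the warm call is not noticeably faster, the first things to check are that the OpenSearch instance at `opensearch_url` is reachable and that the two prompts are close enough under the configured threshold.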