From 7345470669d302ba3b3d17252605a777a9ccec53 Mon Sep 17 00:00:00 2001
From: Ed Branch <140059108+ebranchy@users.noreply.github.com>
Date: Thu, 24 Oct 2024 22:23:32 +0100
Subject: [PATCH] docs: add aws support to how-to-guides (#27450)

This PR adds support to the how-to documentation for using AWS Bedrock
and SageMaker Endpoints. Because the AWS services above don't presently
use API keys to access LLMs, I've amended more of the source code than
would normally be expected.

---------

Co-authored-by: Erick Friis
---
 docs/src/theme/ChatModelTabs.js | 30 +++++++++++++++++++++++++++---
 1 file changed, 27 insertions(+), 3 deletions(-)

diff --git a/docs/src/theme/ChatModelTabs.js b/docs/src/theme/ChatModelTabs.js
index 9427b40396c..b6b403c2f8d 100644
--- a/docs/src/theme/ChatModelTabs.js
+++ b/docs/src/theme/ChatModelTabs.js
@@ -15,6 +15,7 @@ import CodeBlock from "@theme-original/CodeBlock";
  * @property {string} [googleParams] - Parameters for Google chat model. Defaults to `model="gemini-pro"`
  * @property {string} [togetherParams] - Parameters for Together chat model. Defaults to `model="mistralai/Mixtral-8x7B-Instruct-v0.1"`
  * @property {string} [nvidiaParams] - Parameters for Nvidia NIM model. Defaults to `model="meta/llama3-70b-instruct"`
+ * @property {string} [awsBedrockParams] - Parameters for AWS Bedrock chat model.
  * @property {boolean} [hideOpenai] - Whether or not to hide OpenAI chat model.
  * @property {boolean} [hideAnthropic] - Whether or not to hide Anthropic chat model.
  * @property {boolean} [hideCohere] - Whether or not to hide Cohere chat model.
@@ -25,6 +26,7 @@ import CodeBlock from "@theme-original/CodeBlock";
  * @property {boolean} [hideTogether] - Whether or not to hide Together chat model.
  * @property {boolean} [hideAzure] - Whether or not to hide Microsoft Azure OpenAI chat model.
  * @property {boolean} [hideNvidia] - Whether or not to hide NVIDIA NIM model.
+ * @property {boolean} [hideAWS] - Whether or not to hide AWS models.
  * @property {string} [customVarName] - Custom variable name for the model. Defaults to `model`.
  */
 
@@ -43,6 +45,7 @@ export default function ChatModelTabs(props) {
     togetherParams,
     azureParams,
     nvidiaParams,
+    awsBedrockParams,
     hideOpenai,
     hideAnthropic,
     hideCohere,
@@ -53,6 +56,7 @@ export default function ChatModelTabs(props) {
     hideTogether,
     hideAzure,
     hideNvidia,
+    hideAWS,
     customVarName,
   } = props;
 
@@ -74,6 +78,7 @@ export default function ChatModelTabs(props) {
     azureParams ??
     `\n azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],\n azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],\n openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],\n`;
   const nvidiaParamsOrDefault = nvidiaParams ?? `model="meta/llama3-70b-instruct"`
+  const awsBedrockParamsOrDefault = awsBedrockParams ?? `model_id="anthropic.claude-3-5-sonnet-20240620-v1:0"`;
 
   const llmVarName = customVarName ?? "model";
 
@@ -168,6 +173,15 @@ export default function ChatModelTabs(props) {
       default: false,
       shouldHide: hideTogether,
     },
+    {
+      value: "AWS",
+      label: "AWS",
+      text: `from langchain_aws import ChatBedrock\n\n${llmVarName} = ChatBedrock(${awsBedrockParamsOrDefault})`,
+      apiKeyText: "# Ensure your AWS credentials are configured",
+      packageName: "langchain-aws",
+      default: false,
+      shouldHide: hideAWS,
+    },
   ];
 
   return (
@@ -175,10 +189,16 @@ export default function ChatModelTabs(props) {
       {tabItems
         .filter((tabItem) => !tabItem.shouldHide)
         .map((tabItem) => {
-          const apiKeyText = `import getpass
+          let apiKeyText = "";
+          if (tabItem.apiKeyName) {
+            apiKeyText = `import getpass
 import os
 
 os.environ["${tabItem.apiKeyName}"] = getpass.getpass()`;
+          } else if (tabItem.apiKeyText) {
+            apiKeyText = tabItem.apiKeyText;
+          }
+
           return (
             <TabItem value={tabItem.value} label={tabItem.label}>
-              <CodeBlock language="bash">{`pip install -qU ${tabItem.packageName}`}</CodeBlock>
-              <CodeBlock language="python">{apiKeyText + "\n\n" + tabItem.text}</CodeBlock>
+              <CodeBlock language="bash">
+                {`pip install -qU ${tabItem.packageName}`}
+              </CodeBlock>
+              <CodeBlock language="python">
+                {apiKeyText ? apiKeyText + "\n\n" + tabItem.text : tabItem.text}
+              </CodeBlock>
             </TabItem>
           );
         })