From 0345990a428c5ea0cb5d33acd571e56c1929bc85 Mon Sep 17 00:00:00 2001 From: Hayden Wolff Date: Fri, 26 Jul 2024 11:20:52 -0700 Subject: [PATCH] docs: Add NVIDIA NIMs to Model Tab and Feature Table (#24146) **Description:** Add NVIDIA NIMs to Model Tab and LLM Feature Table --------- Co-authored-by: Hayden Wolff Co-authored-by: Erick Friis Co-authored-by: Erick Friis --- docs/scripts/model_feat_table.py | 8 ++++++++ docs/src/theme/ChatModelTabs.js | 14 ++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/docs/scripts/model_feat_table.py b/docs/scripts/model_feat_table.py index b71c74edb01..b8617bcf2d7 100644 --- a/docs/scripts/model_feat_table.py +++ b/docs/scripts/model_feat_table.py @@ -87,6 +87,14 @@ CHAT_MODEL_FEAT_TABLE = { "package": "langchain-huggingface", "link": "/docs/integrations/chat/huggingface/", }, + "ChatNVIDIA": { + "tool_calling": True, + "json_mode": False, + "local": True, + "multimodal": False, + "package": "langchain-nvidia-ai-endpoints", + "link": "/docs/integrations/chat/nvidia_ai_endpoints/", + }, "ChatOllama": { "tool_calling": True, "local": True, diff --git a/docs/src/theme/ChatModelTabs.js b/docs/src/theme/ChatModelTabs.js index 030147a6344..2945c18f97d 100644 --- a/docs/src/theme/ChatModelTabs.js +++ b/docs/src/theme/ChatModelTabs.js @@ -14,6 +14,7 @@ import CodeBlock from "@theme-original/CodeBlock"; * @property {string} [mistralParams] - Parameters for Mistral chat model. Defaults to `model="mistral-large-latest"` * @property {string} [googleParams] - Parameters for Google chat model. Defaults to `model="gemini-pro"` * @property {string} [togetherParams] - Parameters for Together chat model. Defaults to `model="mistralai/Mixtral-8x7B-Instruct-v0.1"` + * @property {string} [nvidiaParams] - Parameters for Nvidia NIM model. Defaults to `model="meta/llama3-70b-instruct"` * @property {boolean} [hideOpenai] - Whether or not to hide OpenAI chat model. 
* @property {boolean} [hideAnthropic] - Whether or not to hide Anthropic chat model. * @property {boolean} [hideCohere] - Whether or not to hide Cohere chat model. @@ -23,6 +24,7 @@ import CodeBlock from "@theme-original/CodeBlock"; * @property {boolean} [hideGoogle] - Whether or not to hide Google VertexAI chat model. * @property {boolean} [hideTogether] - Whether or not to hide Together chat model. * @property {boolean} [hideAzure] - Whether or not to hide Microsoft Azure OpenAI chat model. + * @property {boolean} [hideNvidia] - Whether or not to hide NVIDIA NIM model. * @property {string} [customVarName] - Custom variable name for the model. Defaults to `model`. */ @@ -40,6 +42,7 @@ export default function ChatModelTabs(props) { googleParams, togetherParams, azureParams, + nvidiaParams, hideOpenai, hideAnthropic, hideCohere, @@ -49,6 +52,7 @@ hideGoogle, hideTogether, hideAzure, + hideNvidia, customVarName, } = props; @@ -69,6 +73,7 @@ const azureParamsOrDefault = azureParams ?? `\n azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],\n azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],\n openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],\n`; + const nvidiaParamsOrDefault = nvidiaParams ?? `model="meta/llama3-70b-instruct"`; const llmVarName = customVarName ?? "model"; @@ -118,6 +123,15 @@ { value: "NVIDIA", label: "NVIDIA", text: `from langchain_nvidia_ai_endpoints import ChatNVIDIA\n\n${llmVarName} = ChatNVIDIA(${nvidiaParamsOrDefault})`, apiKeyName: "NVIDIA_API_KEY", packageName: "langchain-nvidia-ai-endpoints", default: false, shouldHide: hideNvidia, }, { value: "FireworksAI", label: "FireworksAI",