docs: Add ChatDatabricks to llm models (#28398)

Thank you for contributing to LangChain!

Add `ChatDatabricks` to the list of LLM model options.

Additional guidelines:
- Make sure optional dependencies are imported within a function.
- Please do not add dependencies to pyproject.toml files (even optional
ones) unless they are required for unit tests.
- Most PRs should not touch more than one package.
- Changes should be backwards compatible.
- If you are adding something to community, do not re-import it in
langchain.

If no one reviews your PR within a few days, please @-mention one of
baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17.

---------

Signed-off-by: Prithvi Kannan <prithvi.kannan@databricks.com>
Co-authored-by: Chester Curme <chester.curme@gmail.com>
This commit is contained in:
Prithvi Kannan 2024-12-02 10:19:30 -08:00 committed by GitHub
parent 58d2bfe310
commit e5b4f9ad75
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -15,6 +15,7 @@ import CodeBlock from "@theme-original/CodeBlock";
* @property {string} [googleParams] - Parameters for Google chat model. Defaults to `model="gemini-pro"` * @property {string} [googleParams] - Parameters for Google chat model. Defaults to `model="gemini-pro"`
* @property {string} [togetherParams] - Parameters for Together chat model. Defaults to `model="mistralai/Mixtral-8x7B-Instruct-v0.1"` * @property {string} [togetherParams] - Parameters for Together chat model. Defaults to `model="mistralai/Mixtral-8x7B-Instruct-v0.1"`
* @property {string} [nvidiaParams] - Parameters for Nvidia NIM model. Defaults to `model="meta/llama3-70b-instruct"` * @property {string} [nvidiaParams] - Parameters for Nvidia NIM model. Defaults to `model="meta/llama3-70b-instruct"`
* @property {string} [databricksParams] - Parameters for Databricks model. Defaults to `endpoint="databricks-meta-llama-3-1-70b-instruct"`
* @property {string} [awsBedrockParams] - Parameters for AWS Bedrock chat model. * @property {string} [awsBedrockParams] - Parameters for AWS Bedrock chat model.
* @property {boolean} [hideOpenai] - Whether or not to hide OpenAI chat model. * @property {boolean} [hideOpenai] - Whether or not to hide OpenAI chat model.
* @property {boolean} [hideAnthropic] - Whether or not to hide Anthropic chat model. * @property {boolean} [hideAnthropic] - Whether or not to hide Anthropic chat model.
@ -27,6 +28,7 @@ import CodeBlock from "@theme-original/CodeBlock";
* @property {boolean} [hideAzure] - Whether or not to hide Microsoft Azure OpenAI chat model. * @property {boolean} [hideAzure] - Whether or not to hide Microsoft Azure OpenAI chat model.
* @property {boolean} [hideNvidia] - Whether or not to hide NVIDIA NIM model. * @property {boolean} [hideNvidia] - Whether or not to hide NVIDIA NIM model.
* @property {boolean} [hideAWS] - Whether or not to hide AWS models. * @property {boolean} [hideAWS] - Whether or not to hide AWS models.
* @property {boolean} [hideDatabricks] - Whether or not to hide Databricks models.
* @property {string} [customVarName] - Custom variable name for the model. Defaults to `model`. * @property {string} [customVarName] - Custom variable name for the model. Defaults to `model`.
*/ */
@ -46,6 +48,7 @@ export default function ChatModelTabs(props) {
azureParams, azureParams,
nvidiaParams, nvidiaParams,
awsBedrockParams, awsBedrockParams,
databricksParams,
hideOpenai, hideOpenai,
hideAnthropic, hideAnthropic,
hideCohere, hideCohere,
@ -57,6 +60,7 @@ export default function ChatModelTabs(props) {
hideAzure, hideAzure,
hideNvidia, hideNvidia,
hideAWS, hideAWS,
hideDatabricks,
customVarName, customVarName,
} = props; } = props;
@ -79,6 +83,7 @@ export default function ChatModelTabs(props) {
`\n azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],\n azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],\n openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],\n`; `\n azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],\n azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],\n openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],\n`;
const nvidiaParamsOrDefault = nvidiaParams ?? `model="meta/llama3-70b-instruct"` const nvidiaParamsOrDefault = nvidiaParams ?? `model="meta/llama3-70b-instruct"`
const awsBedrockParamsOrDefault = awsBedrockParams ?? `model="anthropic.claude-3-5-sonnet-20240620-v1:0",\n beta_use_converse_api=True`; const awsBedrockParamsOrDefault = awsBedrockParams ?? `model="anthropic.claude-3-5-sonnet-20240620-v1:0",\n beta_use_converse_api=True`;
const databricksParamsOrDefault = databricksParams ?? `endpoint="databricks-meta-llama-3-1-70b-instruct"`
const llmVarName = customVarName ?? "model"; const llmVarName = customVarName ?? "model";
@ -182,6 +187,15 @@ export default function ChatModelTabs(props) {
default: false, default: false,
shouldHide: hideTogether, shouldHide: hideTogether,
}, },
{
value: "Databricks",
label: "Databricks",
text: `from databricks_langchain import ChatDatabricks\n\nos.environ["DATABRICKS_HOST"] = "https://example.staging.cloud.databricks.com/serving-endpoints"\n\n${llmVarName} = ChatDatabricks(${databricksParamsOrDefault})`,
apiKeyName: "DATABRICKS_TOKEN",
packageName: "databricks-langchain",
default: false,
shouldHide: hideDatabricks,
},
]; ];
return ( return (