diff --git a/docs/docs/how_to/embed_text.mdx b/docs/docs/how_to/embed_text.mdx
index 2450e99440c..0c636fec3e7 100644
--- a/docs/docs/how_to/embed_text.mdx
+++ b/docs/docs/how_to/embed_text.mdx
@@ -15,87 +15,9 @@ The base Embeddings class in LangChain provides two methods: one for embedding d
### Setup
-import Tabs from '@theme/Tabs';
-import TabItem from '@theme/TabItem';
+import EmbeddingTabs from "@theme/EmbeddingTabs";
-
-
-To start we'll need to install the OpenAI partner package:
-
-```bash
-pip install langchain-openai
-```
-
-Accessing the API requires an API key, which you can get by creating an account and heading [here](https://platform.openai.com/account/api-keys). Once we have a key we'll want to set it as an environment variable by running:
-
-```bash
-export OPENAI_API_KEY="..."
-```
-
-If you'd prefer not to set an environment variable you can pass the key in directly via the `api_key` named parameter when initiating the OpenAI LLM class:
-
-```python
-from langchain_openai import OpenAIEmbeddings
-
-embeddings_model = OpenAIEmbeddings(api_key="...")
-```
-
-Otherwise you can initialize without any params:
-```python
-from langchain_openai import OpenAIEmbeddings
-
-embeddings_model = OpenAIEmbeddings()
-```
-
-
-
-
-To start we'll need to install the Cohere SDK package:
-
-```bash
-pip install langchain-cohere
-```
-
-Accessing the API requires an API key, which you can get by creating an account and heading [here](https://dashboard.cohere.com/api-keys). Once we have a key we'll want to set it as an environment variable by running:
-
-```shell
-export COHERE_API_KEY="..."
-```
-
-If you'd prefer not to set an environment variable you can pass the key in directly via the `cohere_api_key` named parameter when initiating the Cohere LLM class:
-
-```python
-from langchain_cohere import CohereEmbeddings
-
-embeddings_model = CohereEmbeddings(cohere_api_key="...", model='embed-english-v3.0')
-```
-
-Otherwise you can initialize simply as shown below:
-```python
-from langchain_cohere import CohereEmbeddings
-
-embeddings_model = CohereEmbeddings(model='embed-english-v3.0')
-```
-Do note that it is mandatory to pass the model parameter while initializing the CohereEmbeddings class.
-
-
-
-
-To start we'll need to install the Hugging Face partner package:
-
-```bash
-pip install langchain-huggingface
-```
-
-You can then load any [Sentence Transformers model](https://huggingface.co/models?library=sentence-transformers) from the Hugging Face Hub.
-
-```python
-from langchain_huggingface import HuggingFaceEmbeddings
-
-embeddings_model = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
-```
-
-
+<EmbeddingTabs/>
### `embed_documents`
#### Embed list of texts
diff --git a/docs/docs/integrations/chat/index.mdx b/docs/docs/integrations/chat/index.mdx
index e2b0be6da4d..537cdc2c11a 100644
--- a/docs/docs/integrations/chat/index.mdx
+++ b/docs/docs/integrations/chat/index.mdx
@@ -15,6 +15,14 @@ If you'd like to contribute an integration, see [Contributing integrations](/doc
:::
+import ChatModelTabs from "@theme/ChatModelTabs";
+
+<ChatModelTabs customVarName="model" />
+
+```python
+model.invoke("Hello, world!")
+```
+
## Featured Providers
:::info
diff --git a/docs/docs/integrations/text_embedding/azureopenai.ipynb b/docs/docs/integrations/text_embedding/azureopenai.ipynb
index 7f5281e21e9..916132c6f3e 100644
--- a/docs/docs/integrations/text_embedding/azureopenai.ipynb
+++ b/docs/docs/integrations/text_embedding/azureopenai.ipynb
@@ -45,7 +45,7 @@
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": null,
"id": "36521c2a",
"metadata": {},
"outputs": [],
@@ -53,8 +53,10 @@
"import getpass\n",
"import os\n",
"\n",
- "if not os.getenv(\"OPENAI_API_KEY\"):\n",
- " os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"Enter your AzureOpenAI API key: \")"
+ "if not os.getenv(\"AZURE_OPENAI_API_KEY\"):\n",
+ " os.environ[\"AZURE_OPENAI_API_KEY\"] = getpass.getpass(\n",
+ " \"Enter your AzureOpenAI API key: \"\n",
+ " )"
]
},
{
diff --git a/docs/docs/integrations/text_embedding/bedrock.ipynb b/docs/docs/integrations/text_embedding/bedrock.ipynb
index 3c476850782..b559621f291 100644
--- a/docs/docs/integrations/text_embedding/bedrock.ipynb
+++ b/docs/docs/integrations/text_embedding/bedrock.ipynb
@@ -31,12 +31,12 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": null,
"id": "282239c8-e03a-4abc-86c1-ca6120231a20",
"metadata": {},
"outputs": [],
"source": [
- "from langchain_community.embeddings import BedrockEmbeddings\n",
+ "from langchain_aws import BedrockEmbeddings\n",
"\n",
"embeddings = BedrockEmbeddings(\n",
" credentials_profile_name=\"bedrock-admin\", region_name=\"us-east-1\"\n",
diff --git a/docs/docs/integrations/text_embedding/index.mdx b/docs/docs/integrations/text_embedding/index.mdx
index fe2a6271921..16948056512 100644
--- a/docs/docs/integrations/text_embedding/index.mdx
+++ b/docs/docs/integrations/text_embedding/index.mdx
@@ -11,6 +11,14 @@ import { CategoryTable, IndexTable } from "@theme/FeatureTables";
This page documents integrations with various model providers that allow you to use embeddings in LangChain.
+import EmbeddingTabs from "@theme/EmbeddingTabs";
+
+<EmbeddingTabs/>
+
+```python
+embeddings.embed_query("Hello, world!")
+```
+
## All embedding models
diff --git a/docs/docs/integrations/vectorstores/index.mdx b/docs/docs/integrations/vectorstores/index.mdx
index 8b276057dad..c213d968ee3 100644
--- a/docs/docs/integrations/vectorstores/index.mdx
+++ b/docs/docs/integrations/vectorstores/index.mdx
@@ -3,12 +3,24 @@ sidebar_position: 0
sidebar_class_name: hidden
---
-# Vectorstores
+# Vector stores
import { CategoryTable, IndexTable } from "@theme/FeatureTables";
A [vector store](/docs/concepts/vectorstores) stores [embedded](/docs/concepts/embedding_models) data and performs similarity search.
+**Select embedding model:**
+
+import EmbeddingTabs from "@theme/EmbeddingTabs";
+
+<EmbeddingTabs/>
+
+**Select vector store:**
+
+import VectorStoreTabs from "@theme/VectorStoreTabs";
+
+<VectorStoreTabs/>
+
## All Vectorstores
diff --git a/docs/src/theme/ChatModelTabs.js b/docs/src/theme/ChatModelTabs.js
index 9d27a9a44a1..162723942aa 100644
--- a/docs/src/theme/ChatModelTabs.js
+++ b/docs/src/theme/ChatModelTabs.js
@@ -114,7 +114,7 @@ export default function ChatModelTabs(props) {
value: "Google",
label: "Google",
text: `from langchain_google_vertexai import ChatVertexAI\n\n${llmVarName} = ChatVertexAI(${googleParamsOrDefault})`,
- apiKeyName: "GOOGLE_API_KEY",
+ apiKeyText: "# Ensure your VertexAI credentials are configured",
packageName: "langchain-google-vertexai",
default: false,
shouldHide: hideGoogle,
diff --git a/docs/src/theme/EmbeddingTabs.js b/docs/src/theme/EmbeddingTabs.js
index 7ad62a515ad..3c4d1d814fd 100644
--- a/docs/src/theme/EmbeddingTabs.js
+++ b/docs/src/theme/EmbeddingTabs.js
@@ -7,15 +7,41 @@ export default function EmbeddingTabs(props) {
const {
openaiParams,
hideOpenai,
+ azureOpenaiParams,
+ hideAzureOpenai,
+ googleParams,
+ hideGoogle,
+ awsParams,
+ hideAws,
huggingFaceParams,
hideHuggingFace,
+ ollamaParams,
+ hideOllama,
+ cohereParams,
+ hideCohere,
+ mistralParams,
+ hideMistral,
+ nomicParams,
+ hideNomic,
+ nvidiaParams,
+ hideNvidia,
fakeEmbeddingParams,
hideFakeEmbedding,
customVarName,
} = props;
const openAIParamsOrDefault = openaiParams ?? `model="text-embedding-3-large"`;
+ const azureParamsOrDefault =
+ azureOpenaiParams ??
+ `\n azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],\n azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],\n openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],\n`;
+ const googleParamsOrDefault = googleParams ?? `model="text-embedding-004"`;
+ const awsParamsOrDefault = awsParams ?? `model_id="amazon.titan-embed-text-v2:0"`;
const huggingFaceParamsOrDefault = huggingFaceParams ?? `model="sentence-transformers/all-mpnet-base-v2"`;
+ const ollamaParamsOrDefault = ollamaParams ?? `model="llama3"`;
+ const cohereParamsOrDefault = cohereParams ?? `model="embed-english-v3.0"`;
+ const mistralParamsOrDefault = mistralParams ?? `model="mistral-embed"`;
+ const nomicsParamsOrDefault = nomicParams ?? `model="nomic-embed-text-v1.5"`;
+ const nvidiaParamsOrDefault = nvidiaParams ?? `model="NV-Embed-QA"`;
const fakeEmbeddingParamsOrDefault = fakeEmbeddingParams ?? `size=4096`;
const embeddingVarName = customVarName ?? "embeddings";
@@ -30,6 +56,33 @@ export default function EmbeddingTabs(props) {
default: true,
shouldHide: hideOpenai,
},
+ {
+ value: "Azure",
+ label: "Azure",
+ text: `from langchain_openai import AzureOpenAIEmbeddings\n\n${embeddingVarName} = AzureOpenAIEmbeddings(${azureParamsOrDefault})`,
+ apiKeyName: "AZURE_OPENAI_API_KEY",
+ packageName: "langchain-openai",
+ default: false,
+ shouldHide: hideAzureOpenai,
+ },
+ {
+ value: "Google",
+ label: "Google",
+ text: `from langchain_google_vertexai import VertexAIEmbeddings\n\n${embeddingVarName} = VertexAIEmbeddings(${googleParamsOrDefault})`,
+ apiKeyName: undefined,
+ packageName: "langchain-google-vertexai",
+ default: false,
+ shouldHide: hideGoogle,
+ },
+ {
+ value: "AWS",
+ label: "AWS",
+ text: `from langchain_aws import BedrockEmbeddings\n\n${embeddingVarName} = BedrockEmbeddings(${awsParamsOrDefault})`,
+ apiKeyName: undefined,
+ packageName: "langchain-aws",
+ default: false,
+ shouldHide: hideAws,
+ },
{
value: "HuggingFace",
label: "HuggingFace",
@@ -40,8 +93,53 @@ export default function EmbeddingTabs(props) {
shouldHide: hideHuggingFace,
},
{
- value: "Fake Embedding",
- label: "Fake Embedding",
+ value: "Ollama",
+ label: "Ollama",
+ text: `from langchain_ollama import OllamaEmbeddings\n\n${embeddingVarName} = OllamaEmbeddings(${ollamaParamsOrDefault})`,
+ apiKeyName: undefined,
+ packageName: "langchain-ollama",
+ default: false,
+ shouldHide: hideOllama,
+ },
+ {
+ value: "Cohere",
+ label: "Cohere",
+ text: `from langchain_cohere import CohereEmbeddings\n\n${embeddingVarName} = CohereEmbeddings(${cohereParamsOrDefault})`,
+ apiKeyName: "COHERE_API_KEY",
+ packageName: "langchain-cohere",
+ default: false,
+ shouldHide: hideCohere,
+ },
+ {
+ value: "MistralAI",
+ label: "MistralAI",
+ text: `from langchain_mistralai import MistralAIEmbeddings\n\n${embeddingVarName} = MistralAIEmbeddings(${mistralParamsOrDefault})`,
+      apiKeyName: "MISTRAL_API_KEY",
+ packageName: "langchain-mistralai",
+ default: false,
+ shouldHide: hideMistral,
+ },
+ {
+ value: "Nomic",
+ label: "Nomic",
+ text: `from langchain_nomic import NomicEmbeddings\n\n${embeddingVarName} = NomicEmbeddings(${nomicsParamsOrDefault})`,
+ apiKeyName: "NOMIC_API_KEY",
+ packageName: "langchain-nomic",
+ default: false,
+ shouldHide: hideNomic,
+ },
+ {
+ value: "NVIDIA",
+ label: "NVIDIA",
+ text: `from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings\n\n${embeddingVarName} = NVIDIAEmbeddings(${nvidiaParamsOrDefault})`,
+ apiKeyName: "NVIDIA_API_KEY",
+ packageName: "langchain-nvidia-ai-endpoints",
+ default: false,
+ shouldHide: hideNvidia,
+ },
+ {
+ value: "Fake",
+ label: "Fake",
text: `from langchain_core.embeddings import FakeEmbeddings\n\n${embeddingVarName} = FakeEmbeddings(${fakeEmbeddingParamsOrDefault})`,
apiKeyName: undefined,
packageName: "langchain-core",
diff --git a/docs/src/theme/FeatureTables.js b/docs/src/theme/FeatureTables.js
index a7b31cdc291..afa27742aae 100644
--- a/docs/src/theme/FeatureTables.js
+++ b/docs/src/theme/FeatureTables.js
@@ -406,7 +406,7 @@ const FEATURE_TABLES = {
},
{
name: "NVIDIA",
- link: "NVIDIAEmbeddings",
+ link: "nvidia_ai_endpoints",
package: "langchain-nvidia",
apiLink: "https://python.langchain.com/api_reference/nvidia_ai_endpoints/embeddings/langchain_nvidia_ai_endpoints.embeddings.NVIDIAEmbeddings.html"
},
diff --git a/docs/src/theme/VectorStoreTabs.js b/docs/src/theme/VectorStoreTabs.js
new file mode 100644
index 00000000000..52c20fba515
--- /dev/null
+++ b/docs/src/theme/VectorStoreTabs.js
@@ -0,0 +1,92 @@
+import React from "react";
+import Tabs from "@theme/Tabs";
+import TabItem from "@theme/TabItem";
+import CodeBlock from "@theme-original/CodeBlock";
+
+export default function VectorStoreTabs(props) {
+ const { customVarName } = props;
+
+ const vectorStoreVarName = customVarName ?? "vector_store";
+
+ const tabItems = [
+ {
+ value: "In-memory",
+ label: "In-memory",
+      text: `from langchain_core.vectorstores import InMemoryVectorStore\n\n${vectorStoreVarName} = InMemoryVectorStore(embeddings)`,
+ packageName: "langchain-core",
+ default: true,
+ },
+ {
+ value: "AstraDB",
+ label: "AstraDB",
+ text: `from langchain_astradb import AstraDBVectorStore\n\n${vectorStoreVarName} = AstraDBVectorStore(\n embedding=embeddings,\n api_endpoint=ASTRA_DB_API_ENDPOINT,\n collection_name="astra_vector_langchain",\n token=ASTRA_DB_APPLICATION_TOKEN,\n namespace=ASTRA_DB_NAMESPACE,\n)`,
+ packageName: "langchain-astradb",
+ default: false,
+ },
+ {
+ value: "Chroma",
+ label: "Chroma",
+ text: `from langchain_chroma import Chroma\n\n${vectorStoreVarName} = Chroma(embedding_function=embeddings)`,
+ packageName: "langchain-chroma",
+ default: false,
+ },
+ {
+ value: "FAISS",
+ label: "FAISS",
+ text: `from langchain_community.vectorstores import FAISS\n\n${vectorStoreVarName} = FAISS(embedding_function=embeddings)`,
+ packageName: "langchain-community",
+ default: false,
+ },
+ {
+ value: "Milvus",
+ label: "Milvus",
+ text: `from langchain_milvus import Milvus\n\n${vectorStoreVarName} = Milvus(embedding_function=embeddings)`,
+ packageName: "langchain-milvus",
+ default: false,
+ },
+ {
+ value: "MongoDB",
+ label: "MongoDB",
+ text: `from langchain_mongodb import MongoDBAtlasVectorSearch\n\n${vectorStoreVarName} = MongoDBAtlasVectorSearch(\n embedding=embeddings,\n collection=MONGODB_COLLECTION,\n index_name=ATLAS_VECTOR_SEARCH_INDEX_NAME,\n relevance_score_fn="cosine",\n)`,
+ packageName: "langchain-mongodb",
+ default: false,
+ },
+ {
+ value: "PGVector",
+ label: "PGVector",
+ text: `from langchain_postgres import PGVector\n\n${vectorStoreVarName} = PGVector(\n embedding=embeddings,\n collection_name="my_docs",\n connection="postgresql+psycopg://...",\n)`,
+ packageName: "langchain-postgres",
+ default: false,
+ },
+ {
+ value: "Pinecone",
+ label: "Pinecone",
+ text: `from langchain_pinecone import PineconeVectorStore\nfrom pinecone import Pinecone\n\npc = Pinecone(api_key=...)\nindex = pc.Index(index_name)\n\n${vectorStoreVarName} = PineconeVectorStore(embedding=embeddings, index=index)`,
+ packageName: "langchain-pinecone",
+ default: false,
+ },
+ {
+ value: "Qdrant",
+ label: "Qdrant",
+ text: `from langchain_qdrant import QdrantVectorStore\nfrom qdrant_client import QdrantClient\n\nclient = QdrantClient(":memory:")\n${vectorStoreVarName} = QdrantVectorStore(\n client=client,\n collection_name="test",\n embedding=embeddings,\n)`,
+ packageName: "langchain-qdrant",
+ default: false,
+ },
+ ];
+
+ return (
+    <Tabs groupId="vectorStoreTabs">
+      {tabItems.map((tabItem) => (
+        <TabItem value={tabItem.value} label={tabItem.label} default={tabItem.default}>
+          <CodeBlock language="bash">{`pip install -qU ${tabItem.packageName}`}</CodeBlock>
+          <CodeBlock language="python">{tabItem.text}</CodeBlock>
+        </TabItem>
+      ))}
+    </Tabs>
+ );
+}