docs: add component tabs to integration landing pages (#28142)

- Add providers to embedding model tabs
- Add tabs for vector stores
- Add "hello world" examples in integration landing pages using tabs
ccurme 2024-11-18 13:34:35 -05:00 committed by GitHub
parent c26b3575f8
commit a1db744b20
10 changed files with 232 additions and 90 deletions


@@ -15,87 +15,9 @@ The base Embeddings class in LangChain provides two methods: one for embedding d
### Setup
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import EmbeddingTabs from "@theme/EmbeddingTabs";
<Tabs>
<TabItem value="openai" label="OpenAI" default>
To start we'll need to install the OpenAI partner package:
```bash
pip install langchain-openai
```
Accessing the API requires an API key, which you can get by creating an account and heading [here](https://platform.openai.com/account/api-keys). Once we have a key we'll want to set it as an environment variable by running:
```bash
export OPENAI_API_KEY="..."
```
If you'd prefer not to set an environment variable, you can pass the key in directly via the `api_key` named parameter when initializing the OpenAI embeddings class:
```python
from langchain_openai import OpenAIEmbeddings
embeddings_model = OpenAIEmbeddings(api_key="...")
```
Otherwise you can initialize without any params:
```python
from langchain_openai import OpenAIEmbeddings
embeddings_model = OpenAIEmbeddings()
```
</TabItem>
<TabItem value="cohere" label="Cohere">
To start we'll need to install the Cohere SDK package:
```bash
pip install langchain-cohere
```
Accessing the API requires an API key, which you can get by creating an account and heading [here](https://dashboard.cohere.com/api-keys). Once we have a key we'll want to set it as an environment variable by running:
```shell
export COHERE_API_KEY="..."
```
If you'd prefer not to set an environment variable, you can pass the key in directly via the `cohere_api_key` named parameter when initializing the Cohere embeddings class:
```python
from langchain_cohere import CohereEmbeddings
embeddings_model = CohereEmbeddings(cohere_api_key="...", model='embed-english-v3.0')
```
Otherwise you can initialize simply as shown below:
```python
from langchain_cohere import CohereEmbeddings
embeddings_model = CohereEmbeddings(model='embed-english-v3.0')
```
Note that the `model` parameter is required when initializing the `CohereEmbeddings` class.
</TabItem>
<TabItem value="huggingface" label="Hugging Face">
To start we'll need to install the Hugging Face partner package:
```bash
pip install langchain-huggingface
```
You can then load any [Sentence Transformers model](https://huggingface.co/models?library=sentence-transformers) from the Hugging Face Hub.
```python
from langchain_huggingface import HuggingFaceEmbeddings
embeddings_model = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
```
</TabItem>
</Tabs>
<EmbeddingTabs customVarName="embeddings_model" />
### `embed_documents`
#### Embed list of texts
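
As a minimal sketch, `embed_documents` returns one vector per input string (this reuses the `embeddings_model` initialized above; the example strings are placeholders):

```python
doc_embeddings = embeddings_model.embed_documents(
    [
        "Hi there!",
        "Oh, hello!",
        "What's your name?",
    ]
)
len(doc_embeddings), len(doc_embeddings[0])
```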


@@ -15,6 +15,14 @@ If you'd like to contribute an integration, see [Contributing integrations](/doc
:::
import ChatModelTabs from "@theme/ChatModelTabs";
<ChatModelTabs openaiParams={`model="gpt-4o-mini"`} />
```python
model.invoke("Hello, world!")
```
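`invoke` returns an `AIMessage`; as a quick sketch, the generated text can be read from its `content` attribute:
```python
response = model.invoke("Hello, world!")
print(response.content)
```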
## Featured Providers
:::info


@@ -45,7 +45,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": null,
"id": "36521c2a",
"metadata": {},
"outputs": [],
@@ -53,8 +53,10 @@
"import getpass\n",
"import os\n",
"\n",
"if not os.getenv(\"OPENAI_API_KEY\"):\n",
" os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"Enter your AzureOpenAI API key: \")"
"if not os.getenv(\"AZURE_OPENAI_API_KEY\"):\n",
" os.environ[\"AZURE_OPENAI_API_KEY\"] = getpass.getpass(\n",
" \"Enter your AzureOpenAI API key: \"\n",
" )"
]
},
{


@@ -31,12 +31,12 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": null,
"id": "282239c8-e03a-4abc-86c1-ca6120231a20",
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.embeddings import BedrockEmbeddings\n",
"from langchain_aws import BedrockEmbeddings\n",
"\n",
"embeddings = BedrockEmbeddings(\n",
" credentials_profile_name=\"bedrock-admin\", region_name=\"us-east-1\"\n",


@@ -11,6 +11,14 @@ import { CategoryTable, IndexTable } from "@theme/FeatureTables";
This page documents integrations with various model providers that allow you to use embeddings in LangChain.
import EmbeddingTabs from "@theme/EmbeddingTabs";
<EmbeddingTabs/>
```python
embeddings.embed_query("Hello, world!")
```
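`embed_query` returns the embedding as a plain list of floats; a quick sketch for inspecting its dimensionality (which depends on the model selected above):
```python
vector = embeddings.embed_query("Hello, world!")
print(len(vector))
print(vector[:3])
```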
<CategoryTable category="text_embedding" />
## All embedding models


@@ -3,12 +3,24 @@ sidebar_position: 0
sidebar_class_name: hidden
---
# Vectorstores
# Vector stores
import { CategoryTable, IndexTable } from "@theme/FeatureTables";
A [vector store](/docs/concepts/vectorstores) stores [embedded](/docs/concepts/embedding_models) data and performs similarity search.
**Select embedding model:**
import EmbeddingTabs from "@theme/EmbeddingTabs";
<EmbeddingTabs/>
**Select vector store:**
import VectorStoreTabs from "@theme/VectorStoreTabs";
<VectorStoreTabs/>
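With both selections made, a minimal sketch of writing to and querying the store (this assumes the `embeddings` and `vector_store` objects created by the tabs above):
```python
from langchain_core.documents import Document

# Index a single document, then retrieve the closest match.
vector_store.add_documents([Document(page_content="Hello, world!")])
vector_store.similarity_search("Hello, world!", k=1)
```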
<CategoryTable category="vectorstores" />
## All vector stores


@@ -114,7 +114,7 @@ export default function ChatModelTabs(props) {
value: "Google",
label: "Google",
text: `from langchain_google_vertexai import ChatVertexAI\n\n${llmVarName} = ChatVertexAI(${googleParamsOrDefault})`,
apiKeyName: "GOOGLE_API_KEY",
apiKeyText: "# Ensure your VertexAI credentials are configured",
packageName: "langchain-google-vertexai",
default: false,
shouldHide: hideGoogle,


@@ -7,15 +7,41 @@ export default function EmbeddingTabs(props) {
const {
openaiParams,
hideOpenai,
azureOpenaiParams,
hideAzureOpenai,
googleParams,
hideGoogle,
awsParams,
hideAws,
huggingFaceParams,
hideHuggingFace,
ollamaParams,
hideOllama,
cohereParams,
hideCohere,
mistralParams,
hideMistral,
nomicParams,
hideNomic,
nvidiaParams,
hideNvidia,
fakeEmbeddingParams,
hideFakeEmbedding,
customVarName,
} = props;
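// Per-provider default constructor arguments; each can be overridden via the matching *Params prop destructured above.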
const openAIParamsOrDefault = openaiParams ?? `model="text-embedding-3-large"`;
const azureParamsOrDefault =
azureOpenaiParams ??
`\n azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],\n azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],\n openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],\n`;
const googleParamsOrDefault = googleParams ?? `model="text-embedding-004"`;
const awsParamsOrDefault = awsParams ?? `model_id="amazon.titan-embed-text-v2:0"`;
const huggingFaceParamsOrDefault = huggingFaceParams ?? `model="sentence-transformers/all-mpnet-base-v2"`;
const ollamaParamsOrDefault = ollamaParams ?? `model="llama3"`;
const cohereParamsOrDefault = cohereParams ?? `model="embed-english-v3.0"`;
const mistralParamsOrDefault = mistralParams ?? `model="mistral-embed"`;
const nomicsParamsOrDefault = nomicParams ?? `model="nomic-embed-text-v1.5"`;
const nvidiaParamsOrDefault = nvidiaParams ?? `model="NV-Embed-QA"`;
const fakeEmbeddingParamsOrDefault = fakeEmbeddingParams ?? `size=4096`;
const embeddingVarName = customVarName ?? "embeddings";
@@ -30,6 +56,33 @@ export default function EmbeddingTabs(props) {
default: true,
shouldHide: hideOpenai,
},
{
value: "Azure",
label: "Azure",
text: `from langchain_openai import AzureOpenAIEmbeddings\n\n${embeddingVarName} = AzureOpenAIEmbeddings(${azureParamsOrDefault})`,
apiKeyName: "AZURE_OPENAI_API_KEY",
packageName: "langchain-openai",
default: false,
shouldHide: hideAzureOpenai,
},
{
value: "Google",
label: "Google",
text: `from langchain_google_vertexai import VertexAIEmbeddings\n\n${embeddingVarName} = VertexAIEmbeddings(${googleParamsOrDefault})`,
apiKeyName: undefined,
packageName: "langchain-google-vertexai",
default: false,
shouldHide: hideGoogle,
},
{
value: "AWS",
label: "AWS",
text: `from langchain_aws import BedrockEmbeddings\n\n${embeddingVarName} = BedrockEmbeddings(${awsParamsOrDefault})`,
apiKeyName: undefined,
packageName: "langchain-aws",
default: false,
shouldHide: hideAws,
},
{
value: "HuggingFace",
label: "HuggingFace",
@@ -40,8 +93,53 @@ export default function EmbeddingTabs(props) {
shouldHide: hideHuggingFace,
},
{
value: "Fake Embedding",
label: "Fake Embedding",
value: "Ollama",
label: "Ollama",
text: `from langchain_ollama import OllamaEmbeddings\n\n${embeddingVarName} = OllamaEmbeddings(${ollamaParamsOrDefault})`,
apiKeyName: undefined,
packageName: "langchain-ollama",
default: false,
shouldHide: hideOllama,
},
{
value: "Cohere",
label: "Cohere",
text: `from langchain_cohere import CohereEmbeddings\n\n${embeddingVarName} = CohereEmbeddings(${cohereParamsOrDefault})`,
apiKeyName: "COHERE_API_KEY",
packageName: "langchain-cohere",
default: false,
shouldHide: hideCohere,
},
{
value: "MistralAI",
label: "MistralAI",
text: `from langchain_mistralai import MistralAIEmbeddings\n\n${embeddingVarName} = MistralAIEmbeddings(${mistralParamsOrDefault})`,
apiKeyName: "MISTRALAI_API_KEY",
packageName: "langchain-mistralai",
default: false,
shouldHide: hideMistral,
},
{
value: "Nomic",
label: "Nomic",
text: `from langchain_nomic import NomicEmbeddings\n\n${embeddingVarName} = NomicEmbeddings(${nomicsParamsOrDefault})`,
apiKeyName: "NOMIC_API_KEY",
packageName: "langchain-nomic",
default: false,
shouldHide: hideNomic,
},
{
value: "NVIDIA",
label: "NVIDIA",
text: `from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings\n\n${embeddingVarName} = NVIDIAEmbeddings(${nvidiaParamsOrDefault})`,
apiKeyName: "NVIDIA_API_KEY",
packageName: "langchain-nvidia-ai-endpoints",
default: false,
shouldHide: hideNvidia,
},
{
value: "Fake",
label: "Fake",
text: `from langchain_core.embeddings import FakeEmbeddings\n\n${embeddingVarName} = FakeEmbeddings(${fakeEmbeddingParamsOrDefault})`,
apiKeyName: undefined,
packageName: "langchain-core",


@@ -406,7 +406,7 @@ const FEATURE_TABLES = {
},
{
name: "NVIDIA",
link: "NVIDIAEmbeddings",
link: "nvidia_ai_endpoints",
package: "langchain-nvidia",
apiLink: "https://python.langchain.com/api_reference/nvidia_ai_endpoints/embeddings/langchain_nvidia_ai_endpoints.embeddings.NVIDIAEmbeddings.html"
},


@@ -0,0 +1,92 @@
import React from "react";
import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
import CodeBlock from "@theme-original/CodeBlock";
export default function VectorStoreTabs(props) {
const { customVarName } = props;
const vectorStoreVarName = customVarName ?? "vector_store";
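// One tab per vector store: each entry renders a pip install command plus an instantiation snippet that assumes an `embeddings` object is already defined (e.g. by the embedding model tabs).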
const tabItems = [
{
value: "In-memory",
label: "In-memory",
text: `from langchain_core.vectorstores import InMemoryVectorStore\n\n${vectorStoreVarName} = InMemoryVectorStore(embeddings)`,
packageName: "langchain-core",
default: true,
},
{
value: "AstraDB",
label: "AstraDB",
text: `from langchain_astradb import AstraDBVectorStore\n\n${vectorStoreVarName} = AstraDBVectorStore(\n embedding=embeddings,\n api_endpoint=ASTRA_DB_API_ENDPOINT,\n collection_name="astra_vector_langchain",\n token=ASTRA_DB_APPLICATION_TOKEN,\n namespace=ASTRA_DB_NAMESPACE,\n)`,
packageName: "langchain-astradb",
default: false,
},
{
value: "Chroma",
label: "Chroma",
text: `from langchain_chroma import Chroma\n\n${vectorStoreVarName} = Chroma(embedding_function=embeddings)`,
packageName: "langchain-chroma",
default: false,
},
{
value: "FAISS",
label: "FAISS",
text: `import faiss\nfrom langchain_community.docstore.in_memory import InMemoryDocstore\nfrom langchain_community.vectorstores import FAISS\n\nindex = faiss.IndexFlatL2(len(embeddings.embed_query("hello world")))\n\n${vectorStoreVarName} = FAISS(\n    embedding_function=embeddings,\n    index=index,\n    docstore=InMemoryDocstore(),\n    index_to_docstore_id={},\n)`,
packageName: "langchain-community faiss-cpu",
default: false,
},
{
value: "Milvus",
label: "Milvus",
text: `from langchain_milvus import Milvus\n\n${vectorStoreVarName} = Milvus(embedding_function=embeddings)`,
packageName: "langchain-milvus",
default: false,
},
{
value: "MongoDB",
label: "MongoDB",
text: `from langchain_mongodb import MongoDBAtlasVectorSearch\n\n${vectorStoreVarName} = MongoDBAtlasVectorSearch(\n embedding=embeddings,\n collection=MONGODB_COLLECTION,\n index_name=ATLAS_VECTOR_SEARCH_INDEX_NAME,\n relevance_score_fn="cosine",\n)`,
packageName: "langchain-mongodb",
default: false,
},
{
value: "PGVector",
label: "PGVector",
text: `from langchain_postgres import PGVector\n\n${vectorStoreVarName} = PGVector(\n embedding=embeddings,\n collection_name="my_docs",\n connection="postgresql+psycopg://...",\n)`,
packageName: "langchain-postgres",
default: false,
},
{
value: "Pinecone",
label: "Pinecone",
text: `from langchain_pinecone import PineconeVectorStore\nfrom pinecone import Pinecone\n\npc = Pinecone(api_key=...)\nindex = pc.Index(index_name)\n\n${vectorStoreVarName} = PineconeVectorStore(embedding=embeddings, index=index)`,
packageName: "langchain-pinecone",
default: false,
},
{
value: "Qdrant",
label: "Qdrant",
text: `from langchain_qdrant import QdrantVectorStore\nfrom qdrant_client import QdrantClient\n\nclient = QdrantClient(":memory:")\n${vectorStoreVarName} = QdrantVectorStore(\n client=client,\n collection_name="test",\n embedding=embeddings,\n)`,
packageName: "langchain-qdrant",
default: false,
},
];
return (
<Tabs groupId="vectorStoreTabs">
{tabItems.map((tabItem) => (
<TabItem
key={tabItem.value}
value={tabItem.value}
label={tabItem.label}
default={tabItem.default}
>
<CodeBlock language="bash">{`pip install -qU ${tabItem.packageName}`}</CodeBlock>
<CodeBlock language="python">{tabItem.text}</CodeBlock>
</TabItem>
))}
</Tabs>
);
}