docs: Fix broken imports in documentation (#19655)
Found via script in https://github.com/langchain-ai/langchain/pull/19611
parent 0019d8a948
commit 4e9b358ed8
@@ -167,7 +167,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chains import SelfAskWithSearchChain\n",
+"from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain\n",
 "from langchain_community.utilities import SerpAPIWrapper\n",
 "\n",
 "open_ai_llm = OpenAI(temperature=0)\n",
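For context, a minimal sketch of the corrected import in use, roughly following the surrounding notebook cell; it assumes OPENAI_API_KEY and SERPAPI_API_KEY are set, and the question is only illustrative.

```python
from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain
from langchain_community.utilities import SerpAPIWrapper
from langchain_openai import OpenAI

open_ai_llm = OpenAI(temperature=0)  # LLM that drives the self-ask loop
search = SerpAPIWrapper()  # web search backend, needs SERPAPI_API_KEY

# The chain asks follow-up questions and answers them with the search wrapper.
self_ask_with_search = SelfAskWithSearchChain(
    llm=open_ai_llm, search_chain=search, verbose=True
)
self_ask_with_search.run("What is the hometown of the reigning men's U.S. Open champion?")
```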
@@ -41,7 +41,7 @@
 "outputs": [],
 "source": [
 "from langchain_community.document_loaders import ApifyDatasetLoader\n",
-"from langchain_community.document_loaders.base import Document"
+"from langchain_core.documents import Document"
 ]
 },
 {
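A short sketch of the new `Document` location in use with `ApifyDatasetLoader`; the dataset id and the `text`/`url` fields are placeholders that depend on the actor's output schema.

```python
from langchain_community.document_loaders import ApifyDatasetLoader
from langchain_core.documents import Document

# Map each dataset item to a Document; adjust the field names to the dataset.
loader = ApifyDatasetLoader(
    dataset_id="your-dataset-id",
    dataset_mapping_function=lambda item: Document(
        page_content=item["text"], metadata={"source": item["url"]}
    ),
)
documents = loader.load()
```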
@@ -61,7 +61,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain_community.document_loaders import QuipLoader\n",
+"from langchain_community.document_loaders.quip import QuipLoader\n",
 "\n",
 "loader = QuipLoader(\n",
 "    api_url=\"https://platform.quip.com\", access_token=\"change_me\", request_timeout=60\n",
@@ -1378,7 +1378,7 @@
 },
 "outputs": [],
 "source": [
-"from langchain.cache import AzureCosmosDBSemanticCache\n",
+"from langchain_community.cache import AzureCosmosDBSemanticCache\n",
 "from langchain_community.vectorstores.azure_cosmos_db import (\n",
 "    CosmosDBSimilarityType,\n",
 "    CosmosDBVectorSearchType,\n",
@@ -154,7 +154,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain_community.vectorstores import XataVectorStore\n",
+"from langchain_community.vectorstores.xata import XataVectorStore\n",
 "from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "embeddings = OpenAIEmbeddings()\n",
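A hedged sketch of the module-level import in use; the API key, database URL, table name, and sample document are placeholders, and the Xata table is assumed to already have a matching vector column.

```python
from langchain_community.vectorstores.xata import XataVectorStore
from langchain_core.documents import Document
from langchain_openai import OpenAIEmbeddings

embeddings = OpenAIEmbeddings()
docs = [Document(page_content="Xata is a serverless data platform.")]

# Index the documents into an existing Xata table with a vector column.
vector_store = XataVectorStore.from_documents(
    docs,
    embeddings,
    api_key="xau_placeholder",
    db_url="https://workspace-id.us-east-1.xata.sh/db/demo",
    table_name="vectors",
)
```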
@@ -334,7 +334,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.retrievers import CohereRerank, ContextualCompressionRetriever\n",
+"from langchain.retrievers.contextual_compression import ContextualCompressionRetriever\n",
+"from langchain_cohere import CohereRerank\n",
 "from langchain_community.llms import Cohere\n",
 "\n",
 "llm = Cohere(temperature=0)\n",
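A minimal sketch wiring the two corrected imports together; the FAISS index and OpenAIEmbeddings base retriever are assumptions added only to keep the example self-contained, and COHERE_API_KEY / OPENAI_API_KEY are expected in the environment.

```python
from langchain.retrievers.contextual_compression import ContextualCompressionRetriever
from langchain_cohere import CohereRerank
from langchain_community.vectorstores import FAISS
from langchain_openai import OpenAIEmbeddings

# Any base retriever works; a tiny FAISS index keeps the sketch self-contained.
base_retriever = FAISS.from_texts(
    ["Reranking reorders documents by relevance.", "FAISS is a vector index."],
    OpenAIEmbeddings(),
).as_retriever()

compressor = CohereRerank()  # reranker from langchain_cohere
compression_retriever = ContextualCompressionRetriever(
    base_compressor=compressor, base_retriever=base_retriever
)
compression_retriever.invoke("What does reranking do?")
```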
@@ -53,7 +53,7 @@
 "outputs": [],
 "source": [
 "from langchain_community.document_loaders import TextLoader\n",
-"from langchain_community.vectorstores import Jaguar\n",
+"from langchain_community.vectorstores.jaguar import Jaguar\n",
 "from langchain_openai import OpenAIEmbeddings\n",
 "from langchain_text_splitters import CharacterTextSplitter\n",
 "\n",
@@ -147,7 +147,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain_community.vectorstores import Jaguar\n",
+"from langchain_community.vectorstores.jaguar import Jaguar\n",
 "from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "# Instantiate a Jaguar vector store object\n",
@@ -340,7 +340,7 @@
 ],
 "source": [
 "from langchain.retrievers import ContextualCompressionRetriever\n",
-"from langchain_community.retrievers import LLMLinguaCompressor\n",
+"from langchain_community.document_compressors import LLMLinguaCompressor\n",
 "from langchain_openai import ChatOpenAI\n",
 "\n",
 "llm = ChatOpenAI(temperature=0)\n",
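A small sketch of the compressor from its new `document_compressors` location; the gpt2 model name and the sample document are illustrative, and the llmlingua package is assumed to be installed.

```python
from langchain_community.document_compressors import LLMLinguaCompressor
from langchain_core.documents import Document

# Downloads a small Hugging Face model; gpt2 keeps the sketch lightweight.
compressor = LLMLinguaCompressor(model_name="openai-community/gpt2", device_map="cpu")
compressed = compressor.compress_documents(
    [Document(page_content="LLMLingua prunes prompt tokens the LLM does not need.")],
    query="What does LLMLingua do?",
)
```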
@@ -28,8 +28,9 @@
 "import os\n",
 "\n",
 "import pandas as pd\n",
-"from langchain.agents import AgentType, create_pandas_dataframe_agent\n",
+"from langchain.agents import AgentType\n",
 "from langchain_community.document_loaders.airbyte import AirbyteStripeLoader\n",
+"from langchain_experimental.agents import create_pandas_dataframe_agent\n",
 "from langchain_openai import ChatOpenAI\n",
 "\n",
 "stream_name = \"customers\"\n",
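A sketch of the pandas agent built from its `langchain_experimental` home; the toy DataFrame and the question are placeholders, and OPENAI_API_KEY is assumed.

```python
import pandas as pd
from langchain.agents import AgentType
from langchain_experimental.agents import create_pandas_dataframe_agent
from langchain_openai import ChatOpenAI

df = pd.DataFrame({"name": ["Ada", "Grace"], "logins": [3, 7]})

# The agent generates and executes pandas code against `df` to answer questions.
agent = create_pandas_dataframe_agent(
    ChatOpenAI(temperature=0),
    df,
    verbose=True,
    agent_type=AgentType.OPENAI_FUNCTIONS,
)
agent.run("How many logins does Ada have?")
```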
@@ -334,7 +334,7 @@
 "source": [
 "import os\n",
 "\n",
-"from langchain.agents import create_spark_dataframe_agent\n",
+"from langchain_experimental.agents import create_spark_dataframe_agent\n",
 "from langchain_openai import OpenAI\n",
 "\n",
 "os.environ[\"OPENAI_API_KEY\"] = \"...input your openai api key here...\"\n",
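Likewise for the Spark agent, now imported from `langchain_experimental`; the local SparkSession and toy DataFrame are assumptions added for illustration, and OPENAI_API_KEY is assumed.

```python
from langchain_experimental.agents import create_spark_dataframe_agent
from langchain_openai import OpenAI
from pyspark.sql import SparkSession

# A local Spark session and a tiny DataFrame keep the sketch self-contained.
spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([("Ada", 3), ("Grace", 7)], ["name", "logins"])

agent = create_spark_dataframe_agent(llm=OpenAI(temperature=0), df=df, verbose=True)
agent.run("How many rows are in the dataframe?")
```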
@@ -380,7 +380,7 @@
 ],
 "source": [
 "import xorbits.numpy as np\n",
-"from langchain.agents import create_xorbits_agent\n",
+"from langchain_experimental.agents.agent_toolkits import create_xorbits_agent\n",
 "from langchain_openai import OpenAI\n",
 "\n",
 "arr = np.array([1, 2, 3, 4, 5, 6])\n",
@@ -99,8 +99,8 @@
 },
 "outputs": [],
 "source": [
-"from langchain_community.tools.google_drive.tool import GoogleDriveSearchTool\n",
-"from langchain_community.utilities.google_drive import GoogleDriveAPIWrapper\n",
+"from langchain_googledrive.tools.google_drive.tool import GoogleDriveSearchTool\n",
+"from langchain_googledrive.utilities.google_drive import GoogleDriveAPIWrapper\n",
 "\n",
 "# By default, search only in the filename.\n",
 "tool = GoogleDriveSearchTool(\n",
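A hedged sketch of the `langchain_googledrive` imports in use; the folder id and template are placeholders taken as assumptions, and Google credentials (e.g. a GOOGLE_ACCOUNT_FILE service-account file) are assumed to be configured.

```python
from langchain_googledrive.tools.google_drive.tool import GoogleDriveSearchTool
from langchain_googledrive.utilities.google_drive import GoogleDriveAPIWrapper

# Search the body of documents under a folder; "root" is a placeholder folder id.
tool = GoogleDriveSearchTool(
    api_wrapper=GoogleDriveAPIWrapper(
        folder_id="root",
        num_results=2,
        template="gdrive-query-in-folder",
    )
)
tool.run("machine learning")
```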
@@ -69,7 +69,7 @@
 "outputs": [],
 "source": [
 "from langchain.document_loaders import TextLoader\n",
-"from langchain.vectorstores.vikingdb import VikingDB, VikingDBConfig\n",
+"from langchain_community.vectorstores.vikingdb import VikingDB, VikingDBConfig\n",
 "from langchain_openai import OpenAIEmbeddings\n",
 "from langchain_text_splitters import RecursiveCharacterTextSplitter"
 ]
@@ -62,7 +62,8 @@
 "from typing import Any, Dict, List\n",
 "\n",
 "from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler\n",
-"from langchain_core.messages import HumanMessage, LLMResult\n",
+"from langchain_core.messages import HumanMessage\n",
+"from langchain_core.outputs import LLMResult\n",
 "from langchain_openai import ChatOpenAI\n",
 "\n",
 "\n",
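A self-contained sketch of the split imports in use, loosely following the custom-callbacks notebook; the handler names and printed messages are illustrative, and OPENAI_API_KEY is assumed.

```python
import asyncio
from typing import Any, Dict, List

from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
from langchain_core.messages import HumanMessage
from langchain_core.outputs import LLMResult
from langchain_openai import ChatOpenAI


class PrintTokenHandler(BaseCallbackHandler):
    """Sync handler: print every streamed token."""

    def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
        print(f"token: {token}")


class LogRunHandler(AsyncCallbackHandler):
    """Async handler: log the start and end of each LLM call."""

    async def on_llm_start(
        self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
    ) -> None:
        print("LLM call starting")

    async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        print("LLM call finished")


chat = ChatOpenAI(streaming=True, callbacks=[PrintTokenHandler(), LogRunHandler()])
asyncio.run(chat.agenerate([[HumanMessage(content="Tell me a joke")]]))
```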
@@ -28,7 +28,7 @@
 "outputs": [],
 "source": [
 "import nest_asyncio\n",
-"from langchain.chains.graph_qa import GremlinQAChain\n",
+"from langchain.chains.graph_qa.gremlin import GremlinQAChain\n",
 "from langchain.schema import Document\n",
 "from langchain_community.graphs import GremlinGraph\n",
 "from langchain_community.graphs.graph_document import GraphDocument, Node, Relationship\n",
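A hedged sketch of the corrected `GremlinQAChain` import in use against an Azure Cosmos DB Gremlin graph; the endpoint, credentials, chat model, and question are placeholders, and the graph is assumed to already contain data.

```python
from langchain.chains.graph_qa.gremlin import GremlinQAChain
from langchain_community.graphs import GremlinGraph
from langchain_openai import ChatOpenAI

# Placeholder Cosmos DB Gremlin endpoint and credentials.
graph = GremlinGraph(
    url="wss://<account>.gremlin.cosmos.azure.com:443/",
    username="/dbs/<database>/colls/<graph>",
    password="<primary-key>",
)

# The chain translates a natural-language question into a Gremlin query.
chain = GremlinQAChain.from_llm(ChatOpenAI(temperature=0), graph=graph, verbose=True)
chain.run("Which people appear in the graph?")
```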