Mirror of https://github.com/hwchase17/langchain.git, synced 2025-06-25 08:03:39 +00:00
docs: Fix broken imports in documentation (#19655)
Found via script in https://github.com/langchain-ai/langchain/pull/19611
parent 0019d8a948
commit 4e9b358ed8
@@ -167,7 +167,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chains import SelfAskWithSearchChain\n",
+"from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain\n",
 "from langchain_community.utilities import SerpAPIWrapper\n",
 "\n",
 "open_ai_llm = OpenAI(temperature=0)\n",
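For context, a minimal sketch of the re-exported chain in use. The `search_chain` wiring follows the legacy self-ask-with-search pattern and is an assumption, not part of this commit:

    from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain
    from langchain_community.utilities import SerpAPIWrapper
    from langchain_openai import OpenAI

    open_ai_llm = OpenAI(temperature=0)
    search = SerpAPIWrapper()  # requires SERPAPI_API_KEY in the environment
    # The chain decomposes a question into follow-ups and answers them via search.
    chain = SelfAskWithSearchChain(llm=open_ai_llm, search_chain=search, verbose=True)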
@@ -41,7 +41,7 @@
 "outputs": [],
 "source": [
 "from langchain_community.document_loaders import ApifyDatasetLoader\n",
-"from langchain_community.document_loaders.base import Document"
+"from langchain_core.documents import Document"
 ]
 },
 {
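`Document` now lives in `langchain_core.documents`. A hedged sketch of the corrected import with `ApifyDatasetLoader`; the dataset ID and item fields are placeholders:

    from langchain_community.document_loaders import ApifyDatasetLoader
    from langchain_core.documents import Document

    loader = ApifyDatasetLoader(
        dataset_id="your-dataset-id",  # placeholder
        # Map each dataset item to a Document; the field names are hypothetical.
        dataset_mapping_function=lambda item: Document(
            page_content=item["text"], metadata={"source": item["url"]}
        ),
    )
    docs = loader.load()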
@@ -61,7 +61,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain_community.document_loaders import QuipLoader\n",
+"from langchain_community.document_loaders.quip import QuipLoader\n",
 "\n",
 "loader = QuipLoader(\n",
 " api_url=\"https://platform.quip.com\", access_token=\"change_me\", request_timeout=60\n",
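The snippet in this hunk is truncated at the constructor. A hedged completion; the `folder_ids`/`thread_ids` arguments are assumptions based on the Quip integration docs:

    from langchain_community.document_loaders.quip import QuipLoader

    loader = QuipLoader(
        api_url="https://platform.quip.com", access_token="change_me", request_timeout=60
    )
    # Folder and thread IDs are placeholders.
    documents = loader.load(
        folder_ids={"folder-id"}, thread_ids={"thread-id"}, include_attachments=False
    )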
@@ -1378,7 +1378,7 @@
 },
 "outputs": [],
 "source": [
-"from langchain.cache import AzureCosmosDBSemanticCache\n",
+"from langchain_community.cache import AzureCosmosDBSemanticCache\n",
 "from langchain_community.vectorstores.azure_cosmos_db import (\n",
 " CosmosDBSimilarityType,\n",
 " CosmosDBVectorSearchType,\n",
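The semantic cache class moved from `langchain.cache` to `langchain_community.cache`. A sketch of the usual wiring through `set_llm_cache`; every constructor value below is a deployment-specific placeholder, and the parameter list is an assumption based on the LLM-caching docs:

    from langchain.globals import set_llm_cache
    from langchain_community.cache import AzureCosmosDBSemanticCache
    from langchain_community.vectorstores.azure_cosmos_db import (
        CosmosDBSimilarityType,
        CosmosDBVectorSearchType,
    )
    from langchain_openai import OpenAIEmbeddings

    set_llm_cache(
        AzureCosmosDBSemanticCache(
            cosmosdb_connection_string="<connection-string>",  # placeholder
            cosmosdb_client=None,
            embedding=OpenAIEmbeddings(),
            database_name="db",
            collection_name="cache",
            num_lists=1,
            similarity=CosmosDBSimilarityType.COS,
            kind=CosmosDBVectorSearchType.VECTOR_IVF,
            dimensions=1536,
            m=16,
            ef_construction=64,
            ef_search=40,
            score_threshold=0.9,
        )
    )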
@@ -154,7 +154,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain_community.vectorstores import XataVectorStore\n",
+"from langchain_community.vectorstores.xata import XataVectorStore\n",
 "from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "embeddings = OpenAIEmbeddings()\n",
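A hedged sketch of the deep import in use; `docs` stands in for a list of `Document` objects, and the Xata credentials are placeholders:

    from langchain_community.vectorstores.xata import XataVectorStore
    from langchain_openai import OpenAIEmbeddings

    embeddings = OpenAIEmbeddings()
    # api_key, db_url, and table_name are placeholders for your Xata workspace.
    vector_store = XataVectorStore.from_documents(
        docs, embeddings, api_key="xau_...", db_url="https://...", table_name="vectors"
    )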
@@ -334,7 +334,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.retrievers import CohereRerank, ContextualCompressionRetriever\n",
+"from langchain.retrievers.contextual_compression import ContextualCompressionRetriever\n",
+"from langchain_cohere import CohereRerank\n",
 "from langchain_community.llms import Cohere\n",
 "\n",
 "llm = Cohere(temperature=0)\n",
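`CohereRerank` now ships in the `langchain-cohere` partner package, which is why the single combined import is split in two. A minimal sketch of the usual pairing; `base_retriever` stands in for any existing retriever:

    from langchain.retrievers.contextual_compression import ContextualCompressionRetriever
    from langchain_cohere import CohereRerank

    compressor = CohereRerank()  # needs COHERE_API_KEY in the environment
    compression_retriever = ContextualCompressionRetriever(
        base_compressor=compressor, base_retriever=base_retriever  # any retriever
    )
    compressed_docs = compression_retriever.invoke("What did the president say?")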
@@ -53,7 +53,7 @@
 "outputs": [],
 "source": [
 "from langchain_community.document_loaders import TextLoader\n",
-"from langchain_community.vectorstores import Jaguar\n",
+"from langchain_community.vectorstores.jaguar import Jaguar\n",
 "from langchain_openai import OpenAIEmbeddings\n",
 "from langchain_text_splitters import CharacterTextSplitter\n",
 "\n",
@@ -147,7 +147,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain_community.vectorstores import Jaguar\n",
+"from langchain_community.vectorstores.jaguar import Jaguar\n",
 "from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "# Instantiate a Jaguar vector store object\n",
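Both Jaguar hunks switch to the deep module path. A hedged instantiation sketch; the positional arguments and their values are assumptions based on the Jaguar integration docs:

    from langchain_community.vectorstores.jaguar import Jaguar
    from langchain_openai import OpenAIEmbeddings

    embeddings = OpenAIEmbeddings()
    # Instantiate a Jaguar vector store object; all identifiers are placeholders.
    vectorstore = Jaguar(
        "vdb",                          # pod
        "langchain_test_store",         # store
        "v",                            # vector index name
        "cosine_fraction_float",        # vector type
        1536,                           # vector dimension
        "http://127.0.0.1:8080/fwww/",  # JaguarDB server URL
        embeddings,
    )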
@@ -340,7 +340,7 @@
 ],
 "source": [
 "from langchain.retrievers import ContextualCompressionRetriever\n",
-"from langchain_community.retrievers import LLMLinguaCompressor\n",
+"from langchain_community.document_compressors import LLMLinguaCompressor\n",
 "from langchain_openai import ChatOpenAI\n",
 "\n",
 "llm = ChatOpenAI(temperature=0)\n",
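`LLMLinguaCompressor` is a document compressor rather than a retriever, hence the new home in `document_compressors`. A hedged pairing sketch; the model name and device follow the LLMLingua docs, and `base_retriever` is a stand-in:

    from langchain.retrievers import ContextualCompressionRetriever
    from langchain_community.document_compressors import LLMLinguaCompressor

    compressor = LLMLinguaCompressor(model_name="openai-community/gpt2", device_map="cpu")
    retriever = ContextualCompressionRetriever(
        base_compressor=compressor, base_retriever=base_retriever  # any retriever
    )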
@@ -28,8 +28,9 @@
 "import os\n",
 "\n",
 "import pandas as pd\n",
-"from langchain.agents import AgentType, create_pandas_dataframe_agent\n",
+"from langchain.agents import AgentType\n",
 "from langchain_community.document_loaders.airbyte import AirbyteStripeLoader\n",
+"from langchain_experimental.agents import create_pandas_dataframe_agent\n",
 "from langchain_openai import ChatOpenAI\n",
 "\n",
 "stream_name = \"customers\"\n",
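Agent constructors that execute arbitrary code were moved to `langchain-experimental`, so only `AgentType` stays in `langchain.agents`. A minimal sketch with a toy DataFrame in place of the notebook's Airbyte-loaded records:

    import pandas as pd
    from langchain.agents import AgentType
    from langchain_experimental.agents import create_pandas_dataframe_agent
    from langchain_openai import ChatOpenAI

    df = pd.DataFrame({"customer": ["a", "b"], "amount": [10, 20]})
    agent = create_pandas_dataframe_agent(
        ChatOpenAI(temperature=0), df, agent_type=AgentType.OPENAI_FUNCTIONS, verbose=True
    )
    agent.invoke("What is the total amount?")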
@@ -334,7 +334,7 @@
 "source": [
 "import os\n",
 "\n",
-"from langchain.agents import create_spark_dataframe_agent\n",
+"from langchain_experimental.agents import create_spark_dataframe_agent\n",
 "from langchain_openai import OpenAI\n",
 "\n",
 "os.environ[\"OPENAI_API_KEY\"] = \"...input your openai api key here...\"\n",
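Same relocation for the Spark DataFrame agent. A hedged sketch assuming an active `SparkSession`; the CSV path is a placeholder:

    from langchain_experimental.agents import create_spark_dataframe_agent
    from langchain_openai import OpenAI
    from pyspark.sql import SparkSession

    spark = SparkSession.builder.getOrCreate()
    df = spark.read.csv("example.csv", header=True, inferSchema=True)  # placeholder path
    agent = create_spark_dataframe_agent(llm=OpenAI(temperature=0), df=df, verbose=True)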
@@ -380,7 +380,7 @@
 ],
 "source": [
 "import xorbits.numpy as np\n",
-"from langchain.agents import create_xorbits_agent\n",
+"from langchain_experimental.agents.agent_toolkits import create_xorbits_agent\n",
 "from langchain_openai import OpenAI\n",
 "\n",
 "arr = np.array([1, 2, 3, 4, 5, 6])\n",
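And the Xorbits agent. A hedged sketch mirroring the hunk's own array setup; the `data=` keyword is an assumption based on the Xorbits toolkit docs:

    import xorbits.numpy as np
    from langchain_experimental.agents.agent_toolkits import create_xorbits_agent
    from langchain_openai import OpenAI

    arr = np.array([1, 2, 3, 4, 5, 6])
    agent = create_xorbits_agent(llm=OpenAI(temperature=0), data=arr, verbose=True)
    agent.invoke("What is the sum of the array?")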
@@ -99,8 +99,8 @@
 },
 "outputs": [],
 "source": [
-"from langchain_community.tools.google_drive.tool import GoogleDriveSearchTool\n",
-"from langchain_community.utilities.google_drive import GoogleDriveAPIWrapper\n",
+"from langchain_googledrive.tools.google_drive.tool import GoogleDriveSearchTool\n",
+"from langchain_googledrive.utilities.google_drive import GoogleDriveAPIWrapper\n",
 "\n",
 "# By default, search only in the filename.\n",
 "tool = GoogleDriveSearchTool(\n",
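This hunk also cuts off at the constructor. A hedged completion; `folder_id`, `num_results`, and the template name are assumptions based on the langchain-googledrive docs:

    from langchain_googledrive.tools.google_drive.tool import GoogleDriveSearchTool
    from langchain_googledrive.utilities.google_drive import GoogleDriveAPIWrapper

    # By default, search only in the filename.
    tool = GoogleDriveSearchTool(
        api_wrapper=GoogleDriveAPIWrapper(
            folder_id="root",  # placeholder folder ID
            num_results=2,
            template="gdrive-query-in-folder",  # also search inside document bodies
        )
    )
    print(tool.run("machine learning"))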
@@ -69,7 +69,7 @@
 "outputs": [],
 "source": [
 "from langchain.document_loaders import TextLoader\n",
-"from langchain.vectorstores.vikingdb import VikingDB, VikingDBConfig\n",
+"from langchain_community.vectorstores.vikingdb import VikingDB, VikingDBConfig\n",
 "from langchain_openai import OpenAIEmbeddings\n",
 "from langchain_text_splitters import RecursiveCharacterTextSplitter"
 ]
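A hedged sketch of the relocated classes in use; `docs` stands in for loader/splitter output, and every `VikingDBConfig` field is a placeholder:

    from langchain_community.vectorstores.vikingdb import VikingDB, VikingDBConfig
    from langchain_openai import OpenAIEmbeddings

    embeddings = OpenAIEmbeddings()
    db = VikingDB.from_documents(
        docs,  # a list of Document objects from TextLoader + splitter
        embeddings,
        connection_args=VikingDBConfig(
            host="host", region="region", ak="ak", sk="sk", scheme="http"
        ),
        drop_old=True,
    )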
@@ -62,7 +62,8 @@
 "from typing import Any, Dict, List\n",
 "\n",
 "from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler\n",
-"from langchain_core.messages import HumanMessage, LLMResult\n",
+"from langchain_core.messages import HumanMessage\n",
+"from langchain_core.outputs import LLMResult\n",
 "from langchain_openai import ChatOpenAI\n",
 "\n",
 "\n",
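`LLMResult` is an output type, not a message type, hence the split import. A minimal handler sketch using the corrected paths:

    from typing import Any

    from langchain.callbacks.base import BaseCallbackHandler
    from langchain_core.outputs import LLMResult

    class PrintOnEndHandler(BaseCallbackHandler):
        # on_llm_end receives an LLMResult once generation finishes.
        def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
            print(response.generations)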
@@ -28,7 +28,7 @@
 "outputs": [],
 "source": [
 "import nest_asyncio\n",
-"from langchain.chains.graph_qa import GremlinQAChain\n",
+"from langchain.chains.graph_qa.gremlin import GremlinQAChain\n",
 "from langchain.schema import Document\n",
 "from langchain_community.graphs import GremlinGraph\n",
 "from langchain_community.graphs.graph_document import GraphDocument, Node, Relationship\n",
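A hedged sketch of the chain's usual construction via `from_llm`; the Cosmos DB Gremlin connection fields are placeholders and the `GremlinGraph` argument names are assumptions:

    from langchain.chains.graph_qa.gremlin import GremlinQAChain
    from langchain_community.graphs import GremlinGraph
    from langchain_openai import ChatOpenAI

    graph = GremlinGraph(
        url="wss://<account>.gremlin.cosmos.azure.com:443/",  # placeholder
        username="/dbs/<db>/colls/<graph>",                   # placeholder
        password="<key>",                                     # placeholder
    )
    chain = GremlinQAChain.from_llm(ChatOpenAI(temperature=0), graph=graph, verbose=True)
    chain.invoke("Who directed the most movies?")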