diff --git a/docs/docs/guides/model_laboratory.ipynb b/docs/docs/guides/model_laboratory.ipynb index e36fe1b0bd3..5e87c0102fe 100644 --- a/docs/docs/guides/model_laboratory.ipynb +++ b/docs/docs/guides/model_laboratory.ipynb @@ -167,7 +167,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.chains import SelfAskWithSearchChain\n", + "from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain\n", "from langchain_community.utilities import SerpAPIWrapper\n", "\n", "open_ai_llm = OpenAI(temperature=0)\n", diff --git a/docs/docs/integrations/document_loaders/apify_dataset.ipynb b/docs/docs/integrations/document_loaders/apify_dataset.ipynb index 61aa0fa85d8..70184639920 100644 --- a/docs/docs/integrations/document_loaders/apify_dataset.ipynb +++ b/docs/docs/integrations/document_loaders/apify_dataset.ipynb @@ -41,7 +41,7 @@ "outputs": [], "source": [ "from langchain_community.document_loaders import ApifyDatasetLoader\n", - "from langchain_community.document_loaders.base import Document" + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/document_loaders/quip.ipynb b/docs/docs/integrations/document_loaders/quip.ipynb index 0eb6f87da25..5c567d98a67 100644 --- a/docs/docs/integrations/document_loaders/quip.ipynb +++ b/docs/docs/integrations/document_loaders/quip.ipynb @@ -61,7 +61,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain_community.document_loaders import QuipLoader\n", + "from langchain_community.document_loaders.quip import QuipLoader\n", "\n", "loader = QuipLoader(\n", " api_url=\"https://platform.quip.com\", access_token=\"change_me\", request_timeout=60\n", diff --git a/docs/docs/integrations/llms/llm_caching.ipynb b/docs/docs/integrations/llms/llm_caching.ipynb index c5cd1a54e86..54971621115 100644 --- a/docs/docs/integrations/llms/llm_caching.ipynb +++ b/docs/docs/integrations/llms/llm_caching.ipynb @@ -1378,7 +1378,7 @@ }, "outputs": [], "source": [ - "from 
langchain.cache import AzureCosmosDBSemanticCache\n", + "from langchain_community.cache import AzureCosmosDBSemanticCache\n", "from langchain_community.vectorstores.azure_cosmos_db import (\n", " CosmosDBSimilarityType,\n", " CosmosDBVectorSearchType,\n", diff --git a/docs/docs/integrations/memory/xata_chat_message_history.ipynb b/docs/docs/integrations/memory/xata_chat_message_history.ipynb index 3c55b84d6f0..61b66fda398 100644 --- a/docs/docs/integrations/memory/xata_chat_message_history.ipynb +++ b/docs/docs/integrations/memory/xata_chat_message_history.ipynb @@ -154,7 +154,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain_community.vectorstores import XataVectorStore\n", + "from langchain_community.vectorstores.xata import XataVectorStore\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()\n", diff --git a/docs/docs/integrations/retrievers/cohere-reranker.ipynb b/docs/docs/integrations/retrievers/cohere-reranker.ipynb index 7c953ef8a8a..5602e66d9f5 100644 --- a/docs/docs/integrations/retrievers/cohere-reranker.ipynb +++ b/docs/docs/integrations/retrievers/cohere-reranker.ipynb @@ -334,7 +334,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.retrievers import CohereRerank, ContextualCompressionRetriever\n", + "from langchain.retrievers.contextual_compression import ContextualCompressionRetriever\n", + "from langchain_cohere import CohereRerank\n", "from langchain_community.llms import Cohere\n", "\n", "llm = Cohere(temperature=0)\n", diff --git a/docs/docs/integrations/retrievers/jaguar.ipynb b/docs/docs/integrations/retrievers/jaguar.ipynb index efb063b2858..3d3287a69ee 100644 --- a/docs/docs/integrations/retrievers/jaguar.ipynb +++ b/docs/docs/integrations/retrievers/jaguar.ipynb @@ -53,7 +53,7 @@ "outputs": [], "source": [ "from langchain_community.document_loaders import TextLoader\n", - "from langchain_community.vectorstores import Jaguar\n", + "from 
langchain_community.vectorstores.jaguar import Jaguar\n", "from langchain_openai import OpenAIEmbeddings\n", "from langchain_text_splitters import CharacterTextSplitter\n", "\n", @@ -147,7 +147,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain_community.vectorstores import Jaguar\n", + "from langchain_community.vectorstores.jaguar import Jaguar\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "# Instantiate a Jaguar vector store object\n", diff --git a/docs/docs/integrations/retrievers/llmlingua.ipynb b/docs/docs/integrations/retrievers/llmlingua.ipynb index 3e56f726d84..49946f0db1e 100644 --- a/docs/docs/integrations/retrievers/llmlingua.ipynb +++ b/docs/docs/integrations/retrievers/llmlingua.ipynb @@ -340,7 +340,7 @@ ], "source": [ "from langchain.retrievers import ContextualCompressionRetriever\n", - "from langchain_community.retrievers import LLMLinguaCompressor\n", + "from langchain_community.document_compressors import LLMLinguaCompressor\n", "from langchain_openai import ChatOpenAI\n", "\n", "llm = ChatOpenAI(temperature=0)\n", diff --git a/docs/docs/integrations/toolkits/airbyte_structured_qa.ipynb b/docs/docs/integrations/toolkits/airbyte_structured_qa.ipynb index a916301f330..68db356ce7c 100644 --- a/docs/docs/integrations/toolkits/airbyte_structured_qa.ipynb +++ b/docs/docs/integrations/toolkits/airbyte_structured_qa.ipynb @@ -28,8 +28,9 @@ "import os\n", "\n", "import pandas as pd\n", - "from langchain.agents import AgentType, create_pandas_dataframe_agent\n", + "from langchain.agents import AgentType\n", "from langchain_community.document_loaders.airbyte import AirbyteStripeLoader\n", + "from langchain_experimental.agents import create_pandas_dataframe_agent\n", "from langchain_openai import ChatOpenAI\n", "\n", "stream_name = \"customers\"\n", diff --git a/docs/docs/integrations/toolkits/spark.ipynb b/docs/docs/integrations/toolkits/spark.ipynb index f36a6bc1bab..9b0cad6bf4b 100644 --- 
a/docs/docs/integrations/toolkits/spark.ipynb +++ b/docs/docs/integrations/toolkits/spark.ipynb @@ -334,7 +334,7 @@ "source": [ "import os\n", - "from langchain.agents import create_spark_dataframe_agent\n", + "from langchain_experimental.agents import create_spark_dataframe_agent\n", "from langchain_openai import OpenAI\n", "\n", "os.environ[\"OPENAI_API_KEY\"] = \"...input your openai api key here...\"\n", diff --git a/docs/docs/integrations/toolkits/xorbits.ipynb b/docs/docs/integrations/toolkits/xorbits.ipynb index 50e88e16a1e..e5f81d0de8e 100644 --- a/docs/docs/integrations/toolkits/xorbits.ipynb +++ b/docs/docs/integrations/toolkits/xorbits.ipynb @@ -380,7 +380,7 @@ ], "source": [ "import xorbits.numpy as np\n", - "from langchain.agents import create_xorbits_agent\n", + "from langchain_experimental.agents.agent_toolkits import create_xorbits_agent\n", "from langchain_openai import OpenAI\n", "\n", "arr = np.array([1, 2, 3, 4, 5, 6])\n", diff --git a/docs/docs/integrations/tools/google_drive.ipynb b/docs/docs/integrations/tools/google_drive.ipynb index cb3840ad4ea..544411db67b 100644 --- a/docs/docs/integrations/tools/google_drive.ipynb +++ b/docs/docs/integrations/tools/google_drive.ipynb @@ -99,8 +99,8 @@ }, "outputs": [], "source": [ - "from langchain_community.tools.google_drive.tool import GoogleDriveSearchTool\n", - "from langchain_community.utilities.google_drive import GoogleDriveAPIWrapper\n", + "from langchain_googledrive.tools.google_drive.tool import GoogleDriveSearchTool\n", + "from langchain_googledrive.utilities.google_drive import GoogleDriveAPIWrapper\n", "\n", "# By default, search only in the filename.\n", "tool = GoogleDriveSearchTool(\n", diff --git a/docs/docs/integrations/vectorstores/vikingdb.ipynb b/docs/docs/integrations/vectorstores/vikingdb.ipynb index af44c2456be..e309f7b7d9b 100644 --- a/docs/docs/integrations/vectorstores/vikingdb.ipynb +++ b/docs/docs/integrations/vectorstores/vikingdb.ipynb @@ -69,7 +69,7 @@ "outputs": [], 
"source": [ "from langchain.document_loaders import TextLoader\n", - "from langchain.vectorstores.vikingdb import VikingDB, VikingDBConfig\n", + "from langchain_community.vectorstores.vikingdb import VikingDB, VikingDBConfig\n", "from langchain_openai import OpenAIEmbeddings\n", "from langchain_text_splitters import RecursiveCharacterTextSplitter" ] diff --git a/docs/docs/modules/callbacks/async_callbacks.ipynb b/docs/docs/modules/callbacks/async_callbacks.ipynb index a1326a5cc61..90c8537e204 100644 --- a/docs/docs/modules/callbacks/async_callbacks.ipynb +++ b/docs/docs/modules/callbacks/async_callbacks.ipynb @@ -62,7 +62,8 @@ "from typing import Any, Dict, List\n", "\n", "from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler\n", - "from langchain_core.messages import HumanMessage, LLMResult\n", + "from langchain_core.messages import HumanMessage\n", + "from langchain_core.outputs import LLMResult\n", "from langchain_openai import ChatOpenAI\n", "\n", "\n", diff --git a/docs/docs/use_cases/graph/integrations/graph_gremlin_cosmosdb_qa.ipynb b/docs/docs/use_cases/graph/integrations/graph_gremlin_cosmosdb_qa.ipynb index 3ae5d2f2d8b..a8ff5dbd981 100644 --- a/docs/docs/use_cases/graph/integrations/graph_gremlin_cosmosdb_qa.ipynb +++ b/docs/docs/use_cases/graph/integrations/graph_gremlin_cosmosdb_qa.ipynb @@ -28,7 +28,7 @@ "outputs": [], "source": [ "import nest_asyncio\n", - "from langchain.chains.graph_qa import GremlinQAChain\n", + "from langchain.chains.graph_qa.gremlin import GremlinQAChain\n", "from langchain.schema import Document\n", "from langchain_community.graphs import GremlinGraph\n", "from langchain_community.graphs.graph_document import GraphDocument, Node, Relationship\n",