diff --git a/cookbook/advanced_rag_eval.ipynb b/cookbook/advanced_rag_eval.ipynb index 45d424b452d..02e86817b29 100644 --- a/cookbook/advanced_rag_eval.ipynb +++ b/cookbook/advanced_rag_eval.ipynb @@ -520,7 +520,7 @@ "source": [ "import re\n", "\n", - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "from langchain_core.runnables import RunnableLambda\n", "\n", "\n", diff --git a/cookbook/apache_kafka_message_handling.ipynb b/cookbook/apache_kafka_message_handling.ipynb index 616c12ac680..36a0c07e965 100644 --- a/cookbook/apache_kafka_message_handling.ipynb +++ b/cookbook/apache_kafka_message_handling.ipynb @@ -167,7 +167,7 @@ "from langchain.llms import LlamaCpp\n", "from langchain.memory import ConversationTokenBufferMemory\n", "from langchain.prompts import PromptTemplate, load_prompt\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "from langchain_experimental.chat_models import Llama2Chat\n", "from quixstreams import Application, State, message_key\n", "\n", diff --git a/cookbook/custom_agent_with_plugin_retrieval.ipynb b/cookbook/custom_agent_with_plugin_retrieval.ipynb index 9131599da0f..ac7545bcdbb 100644 --- a/cookbook/custom_agent_with_plugin_retrieval.ipynb +++ b/cookbook/custom_agent_with_plugin_retrieval.ipynb @@ -42,9 +42,9 @@ ")\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish\n", "from langchain_community.agent_toolkits import NLAToolkit\n", "from langchain_community.tools.plugin import AIPlugin\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "from langchain_openai import OpenAI" ] }, @@ -114,8 +114,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", "from 
langchain_openai import OpenAIEmbeddings" ] }, diff --git a/cookbook/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb b/cookbook/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb index 30fc61712da..7dfa363ece9 100644 --- a/cookbook/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb +++ b/cookbook/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb @@ -67,9 +67,9 @@ ")\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish\n", "from langchain_community.agent_toolkits import NLAToolkit\n", "from langchain_community.tools.plugin import AIPlugin\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "from langchain_openai import OpenAI" ] }, @@ -138,8 +138,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings" ] }, diff --git a/cookbook/custom_agent_with_tool_retrieval.ipynb b/cookbook/custom_agent_with_tool_retrieval.ipynb index 7981a13716b..73105ea68e3 100644 --- a/cookbook/custom_agent_with_tool_retrieval.ipynb +++ b/cookbook/custom_agent_with_tool_retrieval.ipynb @@ -40,8 +40,8 @@ ")\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish\n", "from langchain_community.utilities import SerpAPIWrapper\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "from langchain_openai import OpenAI" ] }, @@ -103,8 +103,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings" ] }, diff --git a/cookbook/custom_multi_action_agent.ipynb 
b/cookbook/custom_multi_action_agent.ipynb index 271c4c0d816..c37a5bf9dd4 100644 --- a/cookbook/custom_multi_action_agent.ipynb +++ b/cookbook/custom_multi_action_agent.ipynb @@ -72,7 +72,7 @@ "source": [ "from typing import Any, List, Tuple, Union\n", "\n", - "from langchain.schema import AgentAction, AgentFinish\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "\n", "\n", "class FakeAgent(BaseMultiActionAgent):\n", diff --git a/cookbook/forward_looking_retrieval_augmented_generation.ipynb b/cookbook/forward_looking_retrieval_augmented_generation.ipynb index 0abfe0bfeff..4406c1812db 100644 --- a/cookbook/forward_looking_retrieval_augmented_generation.ipynb +++ b/cookbook/forward_looking_retrieval_augmented_generation.ipynb @@ -73,8 +73,9 @@ " AsyncCallbackManagerForRetrieverRun,\n", " CallbackManagerForRetrieverRun,\n", ")\n", - "from langchain.schema import BaseRetriever, Document\n", "from langchain_community.utilities import GoogleSerperAPIWrapper\n", + "from langchain_core.documents import Document\n", + "from langchain_core.retrievers import BaseRetriever\n", "from langchain_openai import ChatOpenAI, OpenAI" ] }, diff --git a/cookbook/openai_functions_retrieval_qa.ipynb b/cookbook/openai_functions_retrieval_qa.ipynb index 648b28b5e2c..621e997088e 100644 --- a/cookbook/openai_functions_retrieval_qa.ipynb +++ b/cookbook/openai_functions_retrieval_qa.ipynb @@ -358,7 +358,7 @@ "\n", "from langchain.chains.openai_functions import create_qa_with_structure_chain\n", "from langchain.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from pydantic import BaseModel, Field" ] }, diff --git a/cookbook/sales_agent_with_context.ipynb b/cookbook/sales_agent_with_context.ipynb index 158329a5f09..e125046af92 100644 --- a/cookbook/sales_agent_with_context.ipynb +++ 
b/cookbook/sales_agent_with_context.ipynb @@ -51,10 +51,10 @@ "from langchain.chains.base import Chain\n", "from langchain.prompts import PromptTemplate\n", "from langchain.prompts.base import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish\n", "from langchain.text_splitter import CharacterTextSplitter\n", "from langchain_community.llms import BaseLLM\n", "from langchain_community.vectorstores import Chroma\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "from langchain_openai import ChatOpenAI, OpenAI, OpenAIEmbeddings\n", "from pydantic import BaseModel, Field" ] diff --git a/cookbook/wikibase_agent.ipynb b/cookbook/wikibase_agent.ipynb index 692193b0229..13c4063cf78 100644 --- a/cookbook/wikibase_agent.ipynb +++ b/cookbook/wikibase_agent.ipynb @@ -401,7 +401,7 @@ ")\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish" + "from langchain_core.agents import AgentAction, AgentFinish" ] }, { diff --git a/docs/docs/expression_language/cookbook/multiple_chains.ipynb b/docs/docs/expression_language/cookbook/multiple_chains.ipynb index 60f87c3764a..8abe2671073 100644 --- a/docs/docs/expression_language/cookbook/multiple_chains.ipynb +++ b/docs/docs/expression_language/cookbook/multiple_chains.ipynb @@ -47,7 +47,7 @@ "source": [ "from operator import itemgetter\n", "\n", - "from langchain.schema import StrOutputParser\n", + "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_openai import ChatOpenAI\n", "\n", diff --git a/docs/docs/expression_language/cookbook/retrieval.ipynb b/docs/docs/expression_language/cookbook/retrieval.ipynb index e7708ca675b..89df3b6a0cf 100644 --- a/docs/docs/expression_language/cookbook/retrieval.ipynb +++ b/docs/docs/expression_language/cookbook/retrieval.ipynb @@ -169,8 +169,8 @@ "metadata": {}, 
"outputs": [], "source": [ - "from langchain.schema import format_document\n", "from langchain_core.messages import AIMessage, HumanMessage, get_buffer_string\n", + "from langchain_core.prompts import format_document\n", "from langchain_core.runnables import RunnableParallel" ] }, diff --git a/docs/docs/expression_language/how_to/binding.ipynb b/docs/docs/expression_language/how_to/binding.ipynb index 375b863585b..fe25f1a3bc9 100644 --- a/docs/docs/expression_language/how_to/binding.ipynb +++ b/docs/docs/expression_language/how_to/binding.ipynb @@ -29,7 +29,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import StrOutputParser\n", + "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_core.runnables import RunnablePassthrough\n", "from langchain_openai import ChatOpenAI" diff --git a/docs/docs/expression_language/streaming.ipynb b/docs/docs/expression_language/streaming.ipynb index d597c4d825f..10254e6918f 100644 --- a/docs/docs/expression_language/streaming.ipynb +++ b/docs/docs/expression_language/streaming.ipynb @@ -68,7 +68,7 @@ "source": [ "# Showing the example using anthropic, but you can use\n", "# your favorite chat model!\n", - "from langchain.chat_models import ChatAnthropic\n", + "from langchain_community.chat_models import ChatAnthropic\n", "\n", "model = ChatAnthropic()\n", "\n", diff --git a/docs/docs/guides/evaluation/trajectory/custom.ipynb b/docs/docs/guides/evaluation/trajectory/custom.ipynb index c6be21a2793..6afb6ef4beb 100644 --- a/docs/docs/guides/evaluation/trajectory/custom.ipynb +++ b/docs/docs/guides/evaluation/trajectory/custom.ipynb @@ -35,7 +35,7 @@ "\n", "from langchain.chains import LLMChain\n", "from langchain.evaluation import AgentTrajectoryEvaluator\n", - "from langchain.schema import AgentAction\n", + "from langchain_core.agents import AgentAction\n", "from langchain_openai import ChatOpenAI\n", "\n", "\n", diff --git 
a/docs/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection.ipynb b/docs/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection.ipynb index 8c7f4574ef3..1bf0b77ab48 100644 --- a/docs/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection.ipynb +++ b/docs/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection.ipynb @@ -90,7 +90,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "documents = [Document(page_content=document_content)]" ] @@ -879,7 +879,7 @@ "outputs": [], "source": [ "from langchain.prompts.prompt import PromptTemplate\n", - "from langchain.schema import format_document\n", + "from langchain_core.prompts import format_document\n", "\n", "DEFAULT_DOCUMENT_PROMPT = PromptTemplate.from_template(template=\"{page_content}\")\n", "\n", diff --git a/docs/docs/integrations/callbacks/labelstudio.ipynb b/docs/docs/integrations/callbacks/labelstudio.ipynb index 91507b0b046..6170b7c0552 100644 --- a/docs/docs/integrations/callbacks/labelstudio.ipynb +++ b/docs/docs/integrations/callbacks/labelstudio.ipynb @@ -242,7 +242,7 @@ "outputs": [], "source": [ "from langchain.callbacks import LabelStudioCallbackHandler\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_openai import ChatOpenAI\n", "\n", "chat_llm = ChatOpenAI(\n", diff --git a/docs/docs/integrations/callbacks/llmonitor.md b/docs/docs/integrations/callbacks/llmonitor.md index 266332a7e34..a3513356b9c 100644 --- a/docs/docs/integrations/callbacks/llmonitor.md +++ b/docs/docs/integrations/callbacks/llmonitor.md @@ -53,7 +53,7 @@ Example: ```python from langchain_openai import ChatOpenAI -from langchain.schema import SystemMessage, HumanMessage +from langchain_core.messages import SystemMessage, HumanMessage from langchain.agents import 
OpenAIFunctionsAgent, AgentExecutor, tool from langchain.callbacks import LLMonitorCallbackHandler diff --git a/docs/docs/integrations/callbacks/trubrics.ipynb b/docs/docs/integrations/callbacks/trubrics.ipynb index 0469e313d0e..d4a6419e384 100644 --- a/docs/docs/integrations/callbacks/trubrics.ipynb +++ b/docs/docs/integrations/callbacks/trubrics.ipynb @@ -267,7 +267,7 @@ "outputs": [], "source": [ "from langchain.callbacks import TrubricsCallbackHandler\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/chat/anthropic_functions.ipynb b/docs/docs/integrations/chat/anthropic_functions.ipynb index 6b2a031f456..e91547c074e 100644 --- a/docs/docs/integrations/chat/anthropic_functions.ipynb +++ b/docs/docs/integrations/chat/anthropic_functions.ipynb @@ -83,7 +83,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage" + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/anyscale.ipynb b/docs/docs/integrations/chat/anyscale.ipynb index 29718174655..98cac216ad3 100644 --- a/docs/docs/integrations/chat/anyscale.ipynb +++ b/docs/docs/integrations/chat/anyscale.ipynb @@ -109,7 +109,7 @@ "source": [ "import asyncio\n", "\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(content=\"You are a helpful AI that shares everything you know.\"),\n", diff --git a/docs/docs/integrations/chat/azure_chat_openai.ipynb b/docs/docs/integrations/chat/azure_chat_openai.ipynb index 399edd6ebf7..57e677340c6 100644 --- a/docs/docs/integrations/chat/azure_chat_openai.ipynb +++ b/docs/docs/integrations/chat/azure_chat_openai.ipynb @@ -31,7 +31,7 @@ "source": [ "import os\n", "\n", - "from langchain.schema import 
HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "from langchain_openai import AzureChatOpenAI" ] }, diff --git a/docs/docs/integrations/chat/azureml_chat_endpoint.ipynb b/docs/docs/integrations/chat/azureml_chat_endpoint.ipynb index 6fe4c869f4f..0bca033c6af 100644 --- a/docs/docs/integrations/chat/azureml_chat_endpoint.ipynb +++ b/docs/docs/integrations/chat/azureml_chat_endpoint.ipynb @@ -74,11 +74,11 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models.azureml_endpoint import (\n", " AzureMLEndpointApiType,\n", " LlamaChatContentFormatter,\n", - ")" + ")\n", + "from langchain_core.messages import HumanMessage" ] }, { @@ -105,8 +105,8 @@ } ], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models.azureml_endpoint import LlamaContentFormatter\n", + "from langchain_core.messages import HumanMessage\n", "\n", "chat = AzureMLChatOnlineEndpoint(\n", " endpoint_url=\"https://..inference.ml.azure.com/score\",\n", diff --git a/docs/docs/integrations/chat/baichuan.ipynb b/docs/docs/integrations/chat/baichuan.ipynb index 3b184b953fa..8b46b88b2b0 100644 --- a/docs/docs/integrations/chat/baichuan.ipynb +++ b/docs/docs/integrations/chat/baichuan.ipynb @@ -29,8 +29,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import ChatBaichuan" + "from langchain_community.chat_models import ChatBaichuan\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/bedrock.ipynb b/docs/docs/integrations/chat/bedrock.ipynb index add4051b7c8..927a78b5859 100644 --- a/docs/docs/integrations/chat/bedrock.ipynb +++ b/docs/docs/integrations/chat/bedrock.ipynb @@ -47,8 +47,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import BedrockChat" + "from 
langchain_community.chat_models import BedrockChat\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/deepinfra.ipynb b/docs/docs/integrations/chat/deepinfra.ipynb index 0f88097a20a..121e8eaabf8 100644 --- a/docs/docs/integrations/chat/deepinfra.ipynb +++ b/docs/docs/integrations/chat/deepinfra.ipynb @@ -68,8 +68,8 @@ }, "outputs": [], "source": [ - "from langchain.chat_models import ChatDeepInfra\n", - "from langchain.schema import HumanMessage" + "from langchain_community.chat_models import ChatDeepInfra\n", + "from langchain_core.messages import HumanMessage" ] }, { @@ -216,7 +216,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.1" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/docs/integrations/chat/ernie.ipynb b/docs/docs/integrations/chat/ernie.ipynb index fd467c22276..cd0e121b06b 100644 --- a/docs/docs/integrations/chat/ernie.ipynb +++ b/docs/docs/integrations/chat/ernie.ipynb @@ -76,8 +76,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import ErnieBotChat\n", + "from langchain_core.messages import HumanMessage\n", "\n", "chat = ErnieBotChat(\n", " ernie_client_id=\"YOUR_CLIENT_ID\", ernie_client_secret=\"YOUR_CLIENT_SECRET\"\n", diff --git a/docs/docs/integrations/chat/everlyai.ipynb b/docs/docs/integrations/chat/everlyai.ipynb index e4e548e85b2..3c08355cf70 100644 --- a/docs/docs/integrations/chat/everlyai.ipynb +++ b/docs/docs/integrations/chat/everlyai.ipynb @@ -73,8 +73,8 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", "from langchain_community.chat_models import ChatEverlyAI\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(content=\"You are a helpful AI that shares everything you know.\"),\n", @@ -127,8 +127,8 @@ ], "source": [ "from 
langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", - "from langchain.schema import HumanMessage, SystemMessage\n", "from langchain_community.chat_models import ChatEverlyAI\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(content=\"You are a humorous AI that delights people.\"),\n", @@ -185,8 +185,8 @@ ], "source": [ "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", - "from langchain.schema import HumanMessage, SystemMessage\n", "from langchain_community.chat_models import ChatEverlyAI\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(content=\"You are a humorous AI that delights people.\"),\n", diff --git a/docs/docs/integrations/chat/fireworks.ipynb b/docs/docs/integrations/chat/fireworks.ipynb index 4be0d01d78d..58afd0a65e5 100644 --- a/docs/docs/integrations/chat/fireworks.ipynb +++ b/docs/docs/integrations/chat/fireworks.ipynb @@ -37,8 +37,8 @@ "source": [ "import os\n", "\n", - "from langchain.schema import HumanMessage, SystemMessage\n", - "from langchain_community.chat_models.fireworks import ChatFireworks" + "from langchain_community.chat_models.fireworks import ChatFireworks\n", + "from langchain_core.messages import HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/integrations/chat/gigachat.ipynb b/docs/docs/integrations/chat/gigachat.ipynb index f4838512ed2..e33b0fa2cda 100644 --- a/docs/docs/integrations/chat/gigachat.ipynb +++ b/docs/docs/integrations/chat/gigachat.ipynb @@ -75,7 +75,7 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(\n", diff --git a/docs/docs/integrations/chat/gpt_router.ipynb b/docs/docs/integrations/chat/gpt_router.ipynb index 35fd32d7469..967bf5bbddb 100644 --- 
a/docs/docs/integrations/chat/gpt_router.ipynb +++ b/docs/docs/integrations/chat/gpt_router.ipynb @@ -70,9 +70,9 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import GPTRouter\n", - "from langchain_community.chat_models.gpt_router import GPTRouterModel" + "from langchain_community.chat_models.gpt_router import GPTRouterModel\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/jinachat.ipynb b/docs/docs/integrations/chat/jinachat.ipynb index 672018f4775..08b247c7698 100644 --- a/docs/docs/integrations/chat/jinachat.ipynb +++ b/docs/docs/integrations/chat/jinachat.ipynb @@ -24,8 +24,8 @@ " HumanMessagePromptTemplate,\n", " SystemMessagePromptTemplate,\n", ")\n", - "from langchain.schema import HumanMessage, SystemMessage\n", - "from langchain_community.chat_models import JinaChat" + "from langchain_community.chat_models import JinaChat\n", + "from langchain_core.messages import HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/integrations/chat/konko.ipynb b/docs/docs/integrations/chat/konko.ipynb index 3582e21d230..293aa4eb5b0 100644 --- a/docs/docs/integrations/chat/konko.ipynb +++ b/docs/docs/integrations/chat/konko.ipynb @@ -40,8 +40,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", - "from langchain_community.chat_models import ChatKonko" + "from langchain_community.chat_models import ChatKonko\n", + "from langchain_core.messages import HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/integrations/chat/litellm.ipynb b/docs/docs/integrations/chat/litellm.ipynb index 1de1f3b9540..6ab12ff186d 100644 --- a/docs/docs/integrations/chat/litellm.ipynb +++ b/docs/docs/integrations/chat/litellm.ipynb @@ -32,8 +32,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import ChatLiteLLM" + "from 
langchain_community.chat_models import ChatLiteLLM\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/litellm_router.ipynb b/docs/docs/integrations/chat/litellm_router.ipynb index 46e7da49e92..4f31928526b 100644 --- a/docs/docs/integrations/chat/litellm_router.ipynb +++ b/docs/docs/integrations/chat/litellm_router.ipynb @@ -38,8 +38,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import ChatLiteLLMRouter\n", + "from langchain_core.messages import HumanMessage\n", "from litellm import Router" ] }, diff --git a/docs/docs/integrations/chat/llama2_chat.ipynb b/docs/docs/integrations/chat/llama2_chat.ipynb index de1e773e34b..debb538f394 100644 --- a/docs/docs/integrations/chat/llama2_chat.ipynb +++ b/docs/docs/integrations/chat/llama2_chat.ipynb @@ -54,7 +54,7 @@ " HumanMessagePromptTemplate,\n", " MessagesPlaceholder,\n", ")\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "\n", "template_messages = [\n", " SystemMessage(content=\"You are a helpful assistant.\"),\n", diff --git a/docs/docs/integrations/chat/minimax.ipynb b/docs/docs/integrations/chat/minimax.ipynb index 9b0735f7f27..3411b0beb06 100644 --- a/docs/docs/integrations/chat/minimax.ipynb +++ b/docs/docs/integrations/chat/minimax.ipynb @@ -39,8 +39,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import MiniMaxChat" + "from langchain_community.chat_models import MiniMaxChat\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/ollama.ipynb b/docs/docs/integrations/chat/ollama.ipynb index d0df5b4b99d..3e4279a95c1 100644 --- a/docs/docs/integrations/chat/ollama.ipynb +++ b/docs/docs/integrations/chat/ollama.ipynb @@ -278,7 +278,7 @@ } ], "source": [ - "from langchain.schema import 
HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "\n", "messages = [\n", " HumanMessage(\n", @@ -313,8 +313,8 @@ "source": [ "import json\n", "\n", - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import ChatOllama\n", + "from langchain_core.messages import HumanMessage\n", "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "\n", @@ -463,8 +463,8 @@ } ], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import ChatOllama\n", + "from langchain_core.messages import HumanMessage\n", "\n", "llm = ChatOllama(model=\"bakllava\", temperature=0)\n", "\n", diff --git a/docs/docs/integrations/chat/ollama_functions.ipynb b/docs/docs/integrations/chat/ollama_functions.ipynb index 707b8d74cca..8a2e2826e9d 100644 --- a/docs/docs/integrations/chat/ollama_functions.ipynb +++ b/docs/docs/integrations/chat/ollama_functions.ipynb @@ -102,7 +102,7 @@ } ], "source": [ - "from langchain.schema import HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "\n", "model.invoke(\"what is the weather in Boston?\")" ] diff --git a/docs/docs/integrations/chat/openai.ipynb b/docs/docs/integrations/chat/openai.ipynb index bdfc034267e..d5fe3e76605 100644 --- a/docs/docs/integrations/chat/openai.ipynb +++ b/docs/docs/integrations/chat/openai.ipynb @@ -34,7 +34,7 @@ " HumanMessagePromptTemplate,\n", " SystemMessagePromptTemplate,\n", ")\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/chat/promptlayer_chatopenai.ipynb b/docs/docs/integrations/chat/promptlayer_chatopenai.ipynb index de2cc76d707..7924442a89b 100644 --- a/docs/docs/integrations/chat/promptlayer_chatopenai.ipynb +++ 
b/docs/docs/integrations/chat/promptlayer_chatopenai.ipynb @@ -62,8 +62,8 @@ "source": [ "import os\n", "\n", - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import PromptLayerChatOpenAI" + "from langchain_community.chat_models import PromptLayerChatOpenAI\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/sparkllm.ipynb b/docs/docs/integrations/chat/sparkllm.ipynb index 4fe68f9a2a7..8e62af8baed 100644 --- a/docs/docs/integrations/chat/sparkllm.ipynb +++ b/docs/docs/integrations/chat/sparkllm.ipynb @@ -30,8 +30,8 @@ "outputs": [], "source": [ "\"\"\"For basic init and call\"\"\"\n", - "from langchain.chat_models import ChatSparkLLM\n", - "from langchain.schema import HumanMessage\n", + "from langchain_community.chat_models import ChatSparkLLM\n", + "from langchain_core.messages import HumanMessage\n", "\n", "chat = ChatSparkLLM(\n", " spark_app_id=\"\", spark_api_key=\"\", spark_api_secret=\"\"\n", diff --git a/docs/docs/integrations/chat/tencent_hunyuan.ipynb b/docs/docs/integrations/chat/tencent_hunyuan.ipynb index d184784f66f..4906f526789 100644 --- a/docs/docs/integrations/chat/tencent_hunyuan.ipynb +++ b/docs/docs/integrations/chat/tencent_hunyuan.ipynb @@ -36,8 +36,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import ChatHunyuan" + "from langchain_community.chat_models import ChatHunyuan\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/tongyi.ipynb b/docs/docs/integrations/chat/tongyi.ipynb index 4bfb8646c17..a80f876ac32 100644 --- a/docs/docs/integrations/chat/tongyi.ipynb +++ b/docs/docs/integrations/chat/tongyi.ipynb @@ -100,8 +100,8 @@ } ], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models.tongyi import ChatTongyi\n", + "from langchain_core.messages import HumanMessage\n", "\n", 
"chatLLM = ChatTongyi(\n", " streaming=True,\n", @@ -128,7 +128,7 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(\n", diff --git a/docs/docs/integrations/chat/vllm.ipynb b/docs/docs/integrations/chat/vllm.ipynb index 88333bcb01a..ef03e1d8d13 100644 --- a/docs/docs/integrations/chat/vllm.ipynb +++ b/docs/docs/integrations/chat/vllm.ipynb @@ -36,7 +36,7 @@ " HumanMessagePromptTemplate,\n", " SystemMessagePromptTemplate,\n", ")\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/chat/volcengine_maas.ipynb b/docs/docs/integrations/chat/volcengine_maas.ipynb index a82c8ce2e76..3cead3bac24 100644 --- a/docs/docs/integrations/chat/volcengine_maas.ipynb +++ b/docs/docs/integrations/chat/volcengine_maas.ipynb @@ -48,8 +48,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import VolcEngineMaasChat" + "from langchain_community.chat_models import VolcEngineMaasChat\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/yandex.ipynb b/docs/docs/integrations/chat/yandex.ipynb index 5a389fa84e7..403a97dae71 100644 --- a/docs/docs/integrations/chat/yandex.ipynb +++ b/docs/docs/integrations/chat/yandex.ipynb @@ -58,8 +58,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", - "from langchain_community.chat_models import ChatYandexGPT" + "from langchain_community.chat_models import ChatYandexGPT\n", + "from langchain_core.messages import HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/integrations/chat_loaders/discord.ipynb b/docs/docs/integrations/chat_loaders/discord.ipynb index 
b4eb0263b15..4425ce3deb3 100644 --- a/docs/docs/integrations/chat_loaders/discord.ipynb +++ b/docs/docs/integrations/chat_loaders/discord.ipynb @@ -79,8 +79,8 @@ "import re\n", "from typing import Iterator, List\n", "\n", - "from langchain.schema import BaseMessage, HumanMessage\n", "from langchain_community.chat_loaders import base as chat_loaders\n", + "from langchain_core.messages import BaseMessage, HumanMessage\n", "\n", "logger = logging.getLogger()\n", "\n", diff --git a/docs/docs/integrations/chat_loaders/twitter.ipynb b/docs/docs/integrations/chat_loaders/twitter.ipynb index af80f142c20..e906af7e67b 100644 --- a/docs/docs/integrations/chat_loaders/twitter.ipynb +++ b/docs/docs/integrations/chat_loaders/twitter.ipynb @@ -22,7 +22,7 @@ "import json\n", "\n", "from langchain.adapters.openai import convert_message_to_dict\n", - "from langchain.schema import AIMessage" + "from langchain_core.messages import AIMessage" ] }, { diff --git a/docs/docs/integrations/chat_loaders/wechat.ipynb b/docs/docs/integrations/chat_loaders/wechat.ipynb index bb81d8cc887..0a0146a4954 100644 --- a/docs/docs/integrations/chat_loaders/wechat.ipynb +++ b/docs/docs/integrations/chat_loaders/wechat.ipynb @@ -78,8 +78,8 @@ "import re\n", "from typing import Iterator, List\n", "\n", - "from langchain.schema import BaseMessage, HumanMessage\n", "from langchain_community.chat_loaders import base as chat_loaders\n", + "from langchain_core.messages import BaseMessage, HumanMessage\n", "\n", "logger = logging.getLogger()\n", "\n", diff --git a/docs/docs/integrations/document_loaders/tensorflow_datasets.ipynb b/docs/docs/integrations/document_loaders/tensorflow_datasets.ipynb index 390597bbd82..61c9d22f0bc 100644 --- a/docs/docs/integrations/document_loaders/tensorflow_datasets.ipynb +++ b/docs/docs/integrations/document_loaders/tensorflow_datasets.ipynb @@ -198,8 +198,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from 
langchain_community.document_loaders import TensorflowDatasetLoader\n", + "from langchain_core.documents import Document\n", "\n", "loader = TensorflowDatasetLoader(\n", " dataset_name=\"mlqa/en\",\n", diff --git a/docs/docs/integrations/document_transformers/doctran_extract_properties.ipynb b/docs/docs/integrations/document_transformers/doctran_extract_properties.ipynb index bba3221ec34..56d5240cf0e 100644 --- a/docs/docs/integrations/document_transformers/doctran_extract_properties.ipynb +++ b/docs/docs/integrations/document_transformers/doctran_extract_properties.ipynb @@ -32,8 +32,8 @@ "source": [ "import json\n", "\n", - "from langchain.schema import Document\n", - "from langchain_community.document_transformers import DoctranPropertyExtractor" + "from langchain_community.document_transformers import DoctranPropertyExtractor\n", + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/document_transformers/doctran_interrogate_document.ipynb b/docs/docs/integrations/document_transformers/doctran_interrogate_document.ipynb index 1f52616b23c..1d6fdd07f5b 100644 --- a/docs/docs/integrations/document_transformers/doctran_interrogate_document.ipynb +++ b/docs/docs/integrations/document_transformers/doctran_interrogate_document.ipynb @@ -30,8 +30,8 @@ "source": [ "import json\n", "\n", - "from langchain.schema import Document\n", - "from langchain_community.document_transformers import DoctranQATransformer" + "from langchain_community.document_transformers import DoctranQATransformer\n", + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/document_transformers/doctran_translate_document.ipynb b/docs/docs/integrations/document_transformers/doctran_translate_document.ipynb index 7f1e93c1111..6f46135b5a4 100644 --- a/docs/docs/integrations/document_transformers/doctran_translate_document.ipynb +++ b/docs/docs/integrations/document_transformers/doctran_translate_document.ipynb @@ -28,8 +28,8 @@ 
"metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", - "from langchain_community.document_transformers import DoctranTextTranslator" + "from langchain_community.document_transformers import DoctranTextTranslator\n", + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/document_transformers/google_translate.ipynb b/docs/docs/integrations/document_transformers/google_translate.ipynb index 67a84aa7f9a..cfe74de5831 100644 --- a/docs/docs/integrations/document_transformers/google_translate.ipynb +++ b/docs/docs/integrations/document_transformers/google_translate.ipynb @@ -31,8 +31,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", - "from langchain_community.document_transformers import GoogleTranslateTransformer" + "from langchain_community.document_transformers import GoogleTranslateTransformer\n", + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/document_transformers/openai_metadata_tagger.ipynb b/docs/docs/integrations/document_transformers/openai_metadata_tagger.ipynb index 11a4b2ae960..dbe8341274b 100644 --- a/docs/docs/integrations/document_transformers/openai_metadata_tagger.ipynb +++ b/docs/docs/integrations/document_transformers/openai_metadata_tagger.ipynb @@ -21,10 +21,10 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.document_transformers.openai_functions import (\n", " create_metadata_tagger,\n", ")\n", + "from langchain_core.documents import Document\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/llms/azure_ml.ipynb b/docs/docs/integrations/llms/azure_ml.ipynb index 9d066bddb3c..b2adb40a84b 100644 --- a/docs/docs/integrations/llms/azure_ml.ipynb +++ b/docs/docs/integrations/llms/azure_ml.ipynb @@ -70,11 +70,11 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import 
HumanMessage\n", "from langchain_community.llms.azureml_endpoint import (\n", " AzureMLEndpointApiType,\n", " LlamaContentFormatter,\n", ")\n", + "from langchain_core.messages import HumanMessage\n", "\n", "llm = AzureMLOnlineEndpoint(\n", " endpoint_url=\"https://..inference.ml.azure.com/score\",\n", @@ -117,11 +117,11 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.llms.azureml_endpoint import (\n", " AzureMLEndpointApiType,\n", " LlamaContentFormatter,\n", ")\n", + "from langchain_core.messages import HumanMessage\n", "\n", "llm = AzureMLOnlineEndpoint(\n", " endpoint_url=\"https://..inference.ml.azure.com/v1/completions\",\n", diff --git a/docs/docs/integrations/llms/javelin.ipynb b/docs/docs/integrations/llms/javelin.ipynb index 935dd0c6e10..c5bcc247d2d 100644 --- a/docs/docs/integrations/llms/javelin.ipynb +++ b/docs/docs/integrations/llms/javelin.ipynb @@ -180,8 +180,8 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", "from langchain_community.chat_models import ChatJavelinAIGateway\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(\n", diff --git a/docs/docs/integrations/memory/zep_memory.ipynb b/docs/docs/integrations/memory/zep_memory.ipynb index e1018e12359..86379dd7851 100644 --- a/docs/docs/integrations/memory/zep_memory.ipynb +++ b/docs/docs/integrations/memory/zep_memory.ipynb @@ -52,8 +52,8 @@ "from langchain.agents import AgentType, Tool, initialize_agent\n", "from langchain.memory import ZepMemory\n", "from langchain.retrievers import ZepRetriever\n", - "from langchain.schema import AIMessage, HumanMessage\n", "from langchain_community.utilities import WikipediaAPIWrapper\n", + "from langchain_core.messages import AIMessage, HumanMessage\n", "from langchain_openai import OpenAI\n", "\n", "# Set this to your Zep server URL\n", diff --git 
a/docs/docs/integrations/providers/arthur_tracking.ipynb b/docs/docs/integrations/providers/arthur_tracking.ipynb index 74114875f37..e7db9365ddd 100644 --- a/docs/docs/integrations/providers/arthur_tracking.ipynb +++ b/docs/docs/integrations/providers/arthur_tracking.ipynb @@ -28,7 +28,7 @@ "source": [ "from langchain.callbacks import ArthurCallbackHandler\n", "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", - "from langchain.schema import HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/providers/cohere.mdx b/docs/docs/integrations/providers/cohere.mdx index 729874a25d8..48c091d0712 100644 --- a/docs/docs/integrations/providers/cohere.mdx +++ b/docs/docs/integrations/providers/cohere.mdx @@ -27,7 +27,7 @@ Get a [Cohere api key](https://dashboard.cohere.ai/) and set it as an environmen ```python from langchain_community.chat_models import ChatCohere -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage chat = ChatCohere() messages = [HumanMessage(content="knock knock")] print(chat(messages)) diff --git a/docs/docs/integrations/providers/flyte.mdx b/docs/docs/integrations/providers/flyte.mdx index 1e7dbb748ca..1e75bea5c7c 100644 --- a/docs/docs/integrations/providers/flyte.mdx +++ b/docs/docs/integrations/providers/flyte.mdx @@ -30,7 +30,7 @@ from langchain.callbacks import FlyteCallbackHandler from langchain.chains import LLMChain from langchain_openai import ChatOpenAI from langchain.prompts import PromptTemplate -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage ``` Set up the necessary environment variables to utilize the OpenAI API and Serp API: diff --git a/docs/docs/integrations/providers/javelin_ai_gateway.mdx b/docs/docs/integrations/providers/javelin_ai_gateway.mdx index d4f4ceeec02..41e7f6fe45b 100644 --- 
a/docs/docs/integrations/providers/javelin_ai_gateway.mdx +++ b/docs/docs/integrations/providers/javelin_ai_gateway.mdx @@ -66,7 +66,7 @@ print(embeddings.embed_documents(["hello"])) ## Chat Example ```python from langchain_community.chat_models import ChatJavelinAIGateway -from langchain.schema import HumanMessage, SystemMessage +from langchain_core.messages import HumanMessage, SystemMessage messages = [ SystemMessage( diff --git a/docs/docs/integrations/providers/konko.mdx b/docs/docs/integrations/providers/konko.mdx index 5e474d4ae00..47f3f088f87 100644 --- a/docs/docs/integrations/providers/konko.mdx +++ b/docs/docs/integrations/providers/konko.mdx @@ -55,7 +55,7 @@ See a usage [example](/docs/integrations/chat/konko). - **ChatCompletion with Mistral-7B:** ```python - from langchain.schema import HumanMessage + from langchain_core.messages import HumanMessage from langchain_community.chat_models import ChatKonko chat_instance = ChatKonko(max_tokens=10, model = 'mistralai/mistral-7b-instruct-v0.1') msg = HumanMessage(content="Hi") diff --git a/docs/docs/integrations/providers/log10.mdx b/docs/docs/integrations/providers/log10.mdx index bd5890bb1f9..38ef1fc7630 100644 --- a/docs/docs/integrations/providers/log10.mdx +++ b/docs/docs/integrations/providers/log10.mdx @@ -18,7 +18,7 @@ Integration with log10 is a simple one-line `log10_callback` integration as show ```python from langchain_openai import ChatOpenAI -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage from log10.langchain import Log10Callback from log10.llm import Log10Config @@ -43,7 +43,7 @@ llm = ChatOpenAI(model_name="gpt-3.5-turbo", callbacks=[log10_callback]) from langchain_openai import OpenAI from langchain_community.chat_models import ChatAnthropic from langchain_openai import ChatOpenAI -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage from log10.langchain import Log10Callback from log10.llm import Log10Config 
diff --git a/docs/docs/integrations/providers/mlflow.mdx b/docs/docs/integrations/providers/mlflow.mdx index 5219f0b7a9c..791b976f388 100644 --- a/docs/docs/integrations/providers/mlflow.mdx +++ b/docs/docs/integrations/providers/mlflow.mdx @@ -100,7 +100,7 @@ print(embeddings.embed_documents(["hello"])) ```python from langchain_community.chat_models import ChatMlflow -from langchain.schema import HumanMessage, SystemMessage +from langchain_core.messages import HumanMessage, SystemMessage chat = ChatMlflow( target_uri="http://127.0.0.1:5000", diff --git a/docs/docs/integrations/providers/mlflow_ai_gateway.mdx b/docs/docs/integrations/providers/mlflow_ai_gateway.mdx index dccabba4945..a18f4a28e68 100644 --- a/docs/docs/integrations/providers/mlflow_ai_gateway.mdx +++ b/docs/docs/integrations/providers/mlflow_ai_gateway.mdx @@ -113,7 +113,7 @@ print(embeddings.embed_documents(["hello"])) ```python from langchain_community.chat_models import ChatMLflowAIGateway -from langchain.schema import HumanMessage, SystemMessage +from langchain_core.messages import HumanMessage, SystemMessage chat = ChatMLflowAIGateway( gateway_uri="http://127.0.0.1:5000", diff --git a/docs/docs/integrations/retrievers/activeloop.ipynb b/docs/docs/integrations/retrievers/activeloop.ipynb index 42b71c4a0ca..e703ecabf06 100644 --- a/docs/docs/integrations/retrievers/activeloop.ipynb +++ b/docs/docs/integrations/retrievers/activeloop.ipynb @@ -276,7 +276,7 @@ "from langchain.chains.openai_functions import (\n", " create_structured_output_chain,\n", ")\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate\n", "from langchain_openai import ChatOpenAI\n", "from pydantic import BaseModel, Field" diff --git a/docs/docs/integrations/retrievers/bm25.ipynb b/docs/docs/integrations/retrievers/bm25.ipynb index 31160541140..d7416048d85 100644 
--- a/docs/docs/integrations/retrievers/bm25.ipynb +++ b/docs/docs/integrations/retrievers/bm25.ipynb @@ -81,7 +81,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "retriever = BM25Retriever.from_documents(\n", " [\n", diff --git a/docs/docs/integrations/retrievers/fleet_context.ipynb b/docs/docs/integrations/retrievers/fleet_context.ipynb index 736184a8366..0609d6cba2f 100644 --- a/docs/docs/integrations/retrievers/fleet_context.ipynb +++ b/docs/docs/integrations/retrievers/fleet_context.ipynb @@ -34,8 +34,8 @@ "\n", "import pandas as pd\n", "from langchain.retrievers import MultiVectorRetriever\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", "from langchain_core.stores import BaseStore\n", "from langchain_core.vectorstores import VectorStore\n", "from langchain_openai import OpenAIEmbeddings\n", @@ -194,7 +194,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import StrOutputParser\n", + "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_core.runnables import RunnablePassthrough\n", "from langchain_openai import ChatOpenAI\n", diff --git a/docs/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query.ipynb index 8b4f7aeed99..64cdc8f6702 100644 --- a/docs/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query.ipynb @@ -83,8 +83,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import DeepLake\n", + "from langchain_core.documents import Document\n", "from langchain_openai import 
OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/astradb.ipynb b/docs/docs/integrations/retrievers/self_query/astradb.ipynb index 43386e6a94b..aa8e81b5e14 100644 --- a/docs/docs/integrations/retrievers/self_query/astradb.ipynb +++ b/docs/docs/integrations/retrievers/self_query/astradb.ipynb @@ -84,8 +84,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain.vectorstores import AstraDB\n", + "from langchain_core.documents import Document\n", "\n", "docs = [\n", " Document(\n", diff --git a/docs/docs/integrations/retrievers/self_query/chroma_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/chroma_self_query.ipynb index 7341495e8a0..8e316f146d9 100644 --- a/docs/docs/integrations/retrievers/self_query/chroma_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/chroma_self_query.ipynb @@ -87,8 +87,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Chroma\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/dashvector.ipynb b/docs/docs/integrations/retrievers/self_query/dashvector.ipynb index 4f3e0b09ec8..7f58c757d94 100644 --- a/docs/docs/integrations/retrievers/self_query/dashvector.ipynb +++ b/docs/docs/integrations/retrievers/self_query/dashvector.ipynb @@ -92,9 +92,9 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.embeddings import DashScopeEmbeddings\n", "from langchain_community.vectorstores import DashVector\n", + "from langchain_core.documents import Document\n", "\n", "embeddings = DashScopeEmbeddings()\n", "\n", diff --git a/docs/docs/integrations/retrievers/self_query/elasticsearch_self_query.ipynb 
b/docs/docs/integrations/retrievers/self_query/elasticsearch_self_query.ipynb index 047f48f8e21..6bef6db0d0c 100644 --- a/docs/docs/integrations/retrievers/self_query/elasticsearch_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/elasticsearch_self_query.ipynb @@ -60,8 +60,8 @@ "import getpass\n", "import os\n", "\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import ElasticsearchStore\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API Key:\")\n", diff --git a/docs/docs/integrations/retrievers/self_query/milvus_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/milvus_self_query.ipynb index d5d9775ec83..2a66e110406 100644 --- a/docs/docs/integrations/retrievers/self_query/milvus_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/milvus_self_query.ipynb @@ -67,8 +67,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Milvus\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/mongodb_atlas.ipynb b/docs/docs/integrations/retrievers/self_query/mongodb_atlas.ipynb index 1ab8ae71868..cfe0aa6a79e 100644 --- a/docs/docs/integrations/retrievers/self_query/mongodb_atlas.ipynb +++ b/docs/docs/integrations/retrievers/self_query/mongodb_atlas.ipynb @@ -57,8 +57,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import MongoDBAtlasVectorSearch\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "from pymongo import MongoClient\n", "\n", diff --git 
a/docs/docs/integrations/retrievers/self_query/myscale_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/myscale_self_query.ipynb index a886efe8f57..3fb8c27ea47 100644 --- a/docs/docs/integrations/retrievers/self_query/myscale_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/myscale_self_query.ipynb @@ -78,8 +78,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import MyScale\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/opensearch_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/opensearch_self_query.ipynb index 7779ccc5192..1f46e5e2d25 100644 --- a/docs/docs/integrations/retrievers/self_query/opensearch_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/opensearch_self_query.ipynb @@ -59,8 +59,8 @@ "import getpass\n", "import os\n", "\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import OpenSearchVectorSearch\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API Key:\")\n", diff --git a/docs/docs/integrations/retrievers/self_query/pgvector_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/pgvector_self_query.ipynb index b6475109362..8daf192f593 100644 --- a/docs/docs/integrations/retrievers/self_query/pgvector_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/pgvector_self_query.ipynb @@ -67,8 +67,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import PGVector\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "collection = \"Name of your 
collection\"\n", diff --git a/docs/docs/integrations/retrievers/self_query/pinecone.ipynb b/docs/docs/integrations/retrievers/self_query/pinecone.ipynb index 576cd7e53b7..fe1ceaa988b 100644 --- a/docs/docs/integrations/retrievers/self_query/pinecone.ipynb +++ b/docs/docs/integrations/retrievers/self_query/pinecone.ipynb @@ -77,7 +77,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", + "from langchain_community.vectorstores import Pinecone\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "from langchain_pinecone import PineconeVectorStore\n", "\n", diff --git a/docs/docs/integrations/retrievers/self_query/qdrant_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/qdrant_self_query.ipynb index f7915707ba5..063fa3573d7 100644 --- a/docs/docs/integrations/retrievers/self_query/qdrant_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/qdrant_self_query.ipynb @@ -70,8 +70,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Qdrant\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/redis_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/redis_self_query.ipynb index 42c57603e7c..0d5adf0ce0c 100644 --- a/docs/docs/integrations/retrievers/self_query/redis_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/redis_self_query.ipynb @@ -67,8 +67,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Redis\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git 
a/docs/docs/integrations/retrievers/self_query/supabase_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/supabase_self_query.ipynb index fe16a03a790..7477cfec580 100644 --- a/docs/docs/integrations/retrievers/self_query/supabase_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/supabase_self_query.ipynb @@ -217,8 +217,8 @@ "source": [ "import os\n", "\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import SupabaseVectorStore\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "from supabase.client import Client, create_client\n", "\n", diff --git a/docs/docs/integrations/retrievers/self_query/timescalevector_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/timescalevector_self_query.ipynb index acac5be8e2b..9dc762d025e 100644 --- a/docs/docs/integrations/retrievers/self_query/timescalevector_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/timescalevector_self_query.ipynb @@ -143,8 +143,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores.timescalevector import TimescaleVector\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/vectara_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/vectara_self_query.ipynb index d8171be1caf..cedc3eb5103 100644 --- a/docs/docs/integrations/retrievers/self_query/vectara_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/vectara_self_query.ipynb @@ -89,11 +89,11 @@ "from langchain.chains import ConversationalRetrievalChain\n", "from langchain.chains.query_constructor.base import AttributeInfo\n", "from langchain.retrievers.self_query.base import SelfQueryRetriever\n", - "from langchain.schema import Document\n", "from 
langchain.text_splitter import CharacterTextSplitter\n", "from langchain_community.document_loaders import TextLoader\n", "from langchain_community.embeddings import FakeEmbeddings\n", "from langchain_community.vectorstores import Vectara\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAI" ] }, diff --git a/docs/docs/integrations/retrievers/self_query/weaviate_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/weaviate_self_query.ipynb index 1e50c9ad21a..ed29277217c 100644 --- a/docs/docs/integrations/retrievers/self_query/weaviate_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/weaviate_self_query.ipynb @@ -45,8 +45,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Weaviate\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/tf_idf.ipynb b/docs/docs/integrations/retrievers/tf_idf.ipynb index 9576f8b0dfe..3e8d2eb4bd5 100644 --- a/docs/docs/integrations/retrievers/tf_idf.ipynb +++ b/docs/docs/integrations/retrievers/tf_idf.ipynb @@ -73,7 +73,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "retriever = TFIDFRetriever.from_documents(\n", " [\n", diff --git a/docs/docs/integrations/retrievers/weaviate-hybrid.ipynb b/docs/docs/integrations/retrievers/weaviate-hybrid.ipynb index 43c24d09300..fdcad48d659 100644 --- a/docs/docs/integrations/retrievers/weaviate-hybrid.ipynb +++ b/docs/docs/integrations/retrievers/weaviate-hybrid.ipynb @@ -74,7 +74,7 @@ ], "source": [ "from langchain.retrievers.weaviate_hybrid_search import WeaviateHybridSearchRetriever\n", - "from langchain.schema import Document" + "from langchain_core.documents import Document" ] }, { diff --git 
a/docs/docs/integrations/retrievers/zep_memorystore.ipynb b/docs/docs/integrations/retrievers/zep_memorystore.ipynb index 8aeca6c1add..95a4de10efe 100644 --- a/docs/docs/integrations/retrievers/zep_memorystore.ipynb +++ b/docs/docs/integrations/retrievers/zep_memorystore.ipynb @@ -63,7 +63,7 @@ "from uuid import uuid4\n", "\n", "from langchain.memory import ZepMemory\n", - "from langchain.schema import AIMessage, HumanMessage\n", + "from langchain_core.messages import AIMessage, HumanMessage\n", "\n", "# Set this to your Zep server URL\n", "ZEP_API_URL = \"http://localhost:8000\"" diff --git a/docs/docs/integrations/toolkits/cogniswitch.ipynb b/docs/docs/integrations/toolkits/cogniswitch.ipynb index 836f425cf60..e39168cb477 100644 --- a/docs/docs/integrations/toolkits/cogniswitch.ipynb +++ b/docs/docs/integrations/toolkits/cogniswitch.ipynb @@ -8,13 +8,19 @@ "## Cogniswitch Tools\n", "\n", "**Use CogniSwitch to build production ready applications that can consume, organize and retrieve knowledge flawlessly. Using the framework of your choice, in this case Langchain CogniSwitch helps alleviate the stress of decision making when it comes to, choosing the right storage and retrieval formats. It also eradicates reliability issues and hallucinations when it comes to responses that are generated. 
Get started by interacting with your knowledge in just two simple steps.**\n", - "\n", - "visit [https://www.cogniswitch.ai/developer to register](https://www.cogniswitch.ai/developer?utm_source=langchain&utm_medium=langchainbuild&utm_id=dev).\n\n", - "**Registration:** \n\n", - "- Signup with your email and verify your registration \n\n", - "- You will get a mail with a platform token and oauth token for using the services.\n\n\n", "\n", - "**step 1: Instantiate the toolkit and get the tools:**\n\n", + "visit [https://www.cogniswitch.ai/developer to register](https://www.cogniswitch.ai/developer?utm_source=langchain&utm_medium=langchainbuild&utm_id=dev).\n", + "\n", + "**Registration:** \n", + "\n", + "- Signup with your email and verify your registration \n", + "\n", + "- You will get a mail with a platform token and oauth token for using the services.\n", + "\n", + "\n", + "\n", + "**step 1: Instantiate the toolkit and get the tools:**\n", + "\n", "- Instantiate the cogniswitch toolkit with the cogniswitch token, openAI API key and OAuth token and get the tools. 
\n", "\n", "**step 2: Instantiate the agent with the tools and llm:**\n", @@ -61,8 +67,8 @@ "import os\n", "\n", "from langchain.agents.agent_toolkits import create_conversational_retrieval_agent\n", - "from langchain.chat_models import ChatOpenAI\n", - "from langchain_community.agent_toolkits import CogniswitchToolkit" + "from langchain_community.agent_toolkits import CogniswitchToolkit\n", + "from langchain_openai import ChatOpenAI" ] }, { diff --git a/docs/docs/integrations/toolkits/connery.ipynb b/docs/docs/integrations/toolkits/connery.ipynb index 184b934b635..335fbcf34e3 100644 --- a/docs/docs/integrations/toolkits/connery.ipynb +++ b/docs/docs/integrations/toolkits/connery.ipynb @@ -74,8 +74,8 @@ "import os\n", "\n", "from langchain.agents import AgentType, initialize_agent\n", - "from langchain.chat_models import ChatOpenAI\n", "from langchain_community.agent_toolkits.connery import ConneryToolkit\n", + "from langchain_community.chat_models import ChatOpenAI\n", "from langchain_community.tools.connery import ConneryService\n", "\n", "# Specify your Connery Runner credentials.\n", diff --git a/docs/docs/integrations/toolkits/robocorp.ipynb b/docs/docs/integrations/toolkits/robocorp.ipynb index 2dc39ea75e6..a5ba7b3fb49 100644 --- a/docs/docs/integrations/toolkits/robocorp.ipynb +++ b/docs/docs/integrations/toolkits/robocorp.ipynb @@ -131,8 +131,8 @@ ], "source": [ "from langchain.agents import AgentExecutor, OpenAIFunctionsAgent\n", - "from langchain.chat_models import ChatOpenAI\n", "from langchain_core.messages import SystemMessage\n", + "from langchain_openai import ChatOpenAI\n", "from langchain_robocorp import ActionServerToolkit\n", "\n", "# Initialize LLM chat model\n", diff --git a/docs/docs/integrations/tools/connery.ipynb b/docs/docs/integrations/tools/connery.ipynb index a5c08296937..43228f56f38 100644 --- a/docs/docs/integrations/tools/connery.ipynb +++ b/docs/docs/integrations/tools/connery.ipynb @@ -49,8 +49,8 @@ "import os\n", "\n", "from 
langchain.agents import AgentType, initialize_agent\n", - "from langchain.chat_models import ChatOpenAI\n", "from langchain_community.tools.connery import ConneryService\n", + "from langchain_openai import ChatOpenAI\n", "\n", "# Specify your Connery Runner credentials.\n", "os.environ[\"CONNERY_RUNNER_URL\"] = \"\"\n", diff --git a/docs/docs/integrations/tools/exa_search.ipynb b/docs/docs/integrations/tools/exa_search.ipynb index 50432253313..8347c2cb5b8 100644 --- a/docs/docs/integrations/tools/exa_search.ipynb +++ b/docs/docs/integrations/tools/exa_search.ipynb @@ -204,7 +204,7 @@ "outputs": [], "source": [ "from langchain.agents import AgentExecutor, OpenAIFunctionsAgent\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "from langchain_openai import ChatOpenAI\n", "\n", "llm = ChatOpenAI(temperature=0)\n", @@ -393,7 +393,7 @@ "outputs": [], "source": [ "from langchain.agents import AgentExecutor, OpenAIFunctionsAgent\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "from langchain_openai import ChatOpenAI\n", "\n", "llm = ChatOpenAI(temperature=0, model=\"gpt-4\")\n", diff --git a/docs/docs/integrations/vectorstores/astradb.ipynb b/docs/docs/integrations/vectorstores/astradb.ipynb index fe980dd1377..92f2b3d0f36 100644 --- a/docs/docs/integrations/vectorstores/astradb.ipynb +++ b/docs/docs/integrations/vectorstores/astradb.ipynb @@ -91,9 +91,9 @@ "from datasets import (\n", " load_dataset,\n", ")\n", - "from langchain.schema import Document\n", "from langchain.text_splitter import RecursiveCharacterTextSplitter\n", "from langchain_community.document_loaders import PyPDFLoader\n", + "from langchain_core.documents import Document\n", "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_core.runnables import RunnablePassthrough\n", diff --git 
a/docs/docs/integrations/vectorstores/cassandra.ipynb b/docs/docs/integrations/vectorstores/cassandra.ipynb index 524f76a1052..b18376c73e8 100644 --- a/docs/docs/integrations/vectorstores/cassandra.ipynb +++ b/docs/docs/integrations/vectorstores/cassandra.ipynb @@ -74,9 +74,9 @@ "from datasets import (\n", " load_dataset,\n", ")\n", - "from langchain.schema import Document\n", "from langchain.text_splitter import RecursiveCharacterTextSplitter\n", "from langchain_community.document_loaders import PyPDFLoader\n", + "from langchain_core.documents import Document\n", "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_core.runnables import RunnablePassthrough\n", diff --git a/docs/docs/integrations/vectorstores/faiss.ipynb b/docs/docs/integrations/vectorstores/faiss.ipynb index 54894e4c66e..12f3f1e45d8 100644 --- a/docs/docs/integrations/vectorstores/faiss.ipynb +++ b/docs/docs/integrations/vectorstores/faiss.ipynb @@ -437,7 +437,7 @@ } ], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "list_of_documents = [\n", " Document(page_content=\"foo\", metadata=dict(page=1)),\n", diff --git a/docs/docs/integrations/vectorstores/faiss_async.ipynb b/docs/docs/integrations/vectorstores/faiss_async.ipynb index a4663ec85a5..38f94c4f53c 100644 --- a/docs/docs/integrations/vectorstores/faiss_async.ipynb +++ b/docs/docs/integrations/vectorstores/faiss_async.ipynb @@ -288,7 +288,7 @@ } ], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "list_of_documents = [\n", " Document(page_content=\"foo\", metadata=dict(page=1)),\n", diff --git a/docs/docs/modules/callbacks/async_callbacks.ipynb b/docs/docs/modules/callbacks/async_callbacks.ipynb index 244fc1e7763..a1326a5cc61 100644 --- a/docs/docs/modules/callbacks/async_callbacks.ipynb +++ 
b/docs/docs/modules/callbacks/async_callbacks.ipynb @@ -62,7 +62,7 @@ "from typing import Any, Dict, List\n", "\n", "from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler\n", - "from langchain.schema import HumanMessage, LLMResult\n", + "from langchain_core.messages import HumanMessage, LLMResult\n", "from langchain_openai import ChatOpenAI\n", "\n", "\n", diff --git a/docs/docs/modules/callbacks/custom_callbacks.ipynb b/docs/docs/modules/callbacks/custom_callbacks.ipynb index 185a249dd90..8adaafd6e03 100644 --- a/docs/docs/modules/callbacks/custom_callbacks.ipynb +++ b/docs/docs/modules/callbacks/custom_callbacks.ipynb @@ -53,7 +53,7 @@ ], "source": [ "from langchain.callbacks.base import BaseCallbackHandler\n", - "from langchain.schema import HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "from langchain_openai import ChatOpenAI\n", "\n", "\n", diff --git a/docs/docs/modules/callbacks/multiple_callbacks.ipynb b/docs/docs/modules/callbacks/multiple_callbacks.ipynb index 208c5292259..252f660c15f 100644 --- a/docs/docs/modules/callbacks/multiple_callbacks.ipynb +++ b/docs/docs/modules/callbacks/multiple_callbacks.ipynb @@ -128,7 +128,7 @@ "\n", "from langchain.agents import AgentType, initialize_agent, load_tools\n", "from langchain.callbacks.base import BaseCallbackHandler\n", - "from langchain.schema import AgentAction\n", + "from langchain_core.agents import AgentAction\n", "from langchain_openai import OpenAI\n", "\n", "\n", diff --git a/docs/docs/modules/data_connection/indexing.ipynb b/docs/docs/modules/data_connection/indexing.ipynb index 45a5d92a42b..7767f931ddb 100644 --- a/docs/docs/modules/data_connection/indexing.ipynb +++ b/docs/docs/modules/data_connection/indexing.ipynb @@ -91,8 +91,8 @@ "outputs": [], "source": [ "from langchain.indexes import SQLRecordManager, index\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import ElasticsearchStore\n", + "from 
langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings" ] }, diff --git a/docs/docs/modules/data_connection/retrievers/index.mdx b/docs/docs/modules/data_connection/retrievers/index.mdx index 4f5ca072841..40e3efa229e 100644 --- a/docs/docs/modules/data_connection/retrievers/index.mdx +++ b/docs/docs/modules/data_connection/retrievers/index.mdx @@ -49,7 +49,7 @@ Since retrievers are `Runnable`'s, we can easily compose them with other `Runnab ```python from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate -from langchain.schema import StrOutputParser +from langchain_core.output_parsers import StrOutputParser from langchain_core.runnables import RunnablePassthrough template = """Answer the question based only on the following context: diff --git a/docs/docs/modules/data_connection/retrievers/self_query.ipynb b/docs/docs/modules/data_connection/retrievers/self_query.ipynb index 2b44db886f0..973a55ad699 100644 --- a/docs/docs/modules/data_connection/retrievers/self_query.ipynb +++ b/docs/docs/modules/data_connection/retrievers/self_query.ipynb @@ -40,8 +40,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Chroma\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "docs = [\n", diff --git a/docs/docs/modules/data_connection/retrievers/time_weighted_vectorstore.ipynb b/docs/docs/modules/data_connection/retrievers/time_weighted_vectorstore.ipynb index 3f739f0f300..bc334cb6401 100644 --- a/docs/docs/modules/data_connection/retrievers/time_weighted_vectorstore.ipynb +++ b/docs/docs/modules/data_connection/retrievers/time_weighted_vectorstore.ipynb @@ -30,8 +30,8 @@ "import faiss\n", "from langchain.docstore import InMemoryDocstore\n", "from langchain.retrievers import TimeWeightedVectorStoreRetriever\n", - "from langchain.schema import Document\n", 
"from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings" ] }, diff --git a/docs/docs/modules/memory/adding_memory.ipynb b/docs/docs/modules/memory/adding_memory.ipynb index e68e8437cc5..bbfe51344a6 100644 --- a/docs/docs/modules/memory/adding_memory.ipynb +++ b/docs/docs/modules/memory/adding_memory.ipynb @@ -181,7 +181,7 @@ " HumanMessagePromptTemplate,\n", " MessagesPlaceholder,\n", ")\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/modules/model_io/chat/quick_start.ipynb b/docs/docs/modules/model_io/chat/quick_start.ipynb index 1be5c1f3acd..3c48cf50be8 100644 --- a/docs/docs/modules/model_io/chat/quick_start.ipynb +++ b/docs/docs/modules/model_io/chat/quick_start.ipynb @@ -633,7 +633,7 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "chat(\n", " [\n", diff --git a/docs/docs/modules/model_io/prompts/composition.ipynb b/docs/docs/modules/model_io/prompts/composition.ipynb index 0069ca72a74..f94248efecf 100644 --- a/docs/docs/modules/model_io/prompts/composition.ipynb +++ b/docs/docs/modules/model_io/prompts/composition.ipynb @@ -169,7 +169,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import AIMessage, HumanMessage, SystemMessage" + "from langchain_core.messages import AIMessage, HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/modules/model_io/quick_start.mdx b/docs/docs/modules/model_io/quick_start.mdx index 3c6be1d5199..62a78413d2b 100644 --- a/docs/docs/modules/model_io/quick_start.mdx +++ b/docs/docs/modules/model_io/quick_start.mdx @@ -77,7 +77,7 @@ For a deeper conceptual explanation of this difference please see [this document We can see the difference between an LLM and a 
ChatModel when we invoke it. ```python -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage text = "What would be a good company name for a company that makes colorful socks?" messages = [HumanMessage(content=text)] diff --git a/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb b/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb index 4c464e840d5..3cba274628e 100644 --- a/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb +++ b/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb @@ -209,7 +209,7 @@ "source": [ "import boto3\n", "from langchain.chains.graph_qa.neptune_sparql import NeptuneSparqlQAChain\n", - "from langchain.chat_models import BedrockChat\n", + "from langchain_community.chat_models import BedrockChat\n", "from langchain_community.graphs import NeptuneRdfGraph\n", "\n", "host = \"\"\n", diff --git a/libs/core/langchain_core/runnables/base.py b/libs/core/langchain_core/runnables/base.py index 8f2f23d76e0..58ab8a0a8ff 100644 --- a/libs/core/langchain_core/runnables/base.py +++ b/libs/core/langchain_core/runnables/base.py @@ -2934,7 +2934,7 @@ class RunnableGenerator(Runnable[Input, Output]): from langchain_core.prompts import ChatPromptTemplate from langchain_core.runnables import RunnableGenerator, RunnableLambda from langchain_openai import ChatOpenAI - from langchain.schema import StrOutputParser + from langchain_core.output_parsers import StrOutputParser model = ChatOpenAI() diff --git a/libs/experimental/langchain_experimental/autonomous_agents/autogpt/output_parser.py b/libs/experimental/langchain_experimental/autonomous_agents/autogpt/output_parser.py index d1d655029c3..774a1230548 100644 --- a/libs/experimental/langchain_experimental/autonomous_agents/autogpt/output_parser.py +++ b/libs/experimental/langchain_experimental/autonomous_agents/autogpt/output_parser.py @@ -3,7 +3,7 @@ import re from abc import abstractmethod from typing import Dict, NamedTuple -from langchain.schema import BaseOutputParser +from 
langchain_core.output_parsers import BaseOutputParser class AutoGPTAction(NamedTuple): diff --git a/libs/experimental/langchain_experimental/comprehend_moderation/base_moderation.py b/libs/experimental/langchain_experimental/comprehend_moderation/base_moderation.py index 3d19794ffee..165962d4735 100644 --- a/libs/experimental/langchain_experimental/comprehend_moderation/base_moderation.py +++ b/libs/experimental/langchain_experimental/comprehend_moderation/base_moderation.py @@ -2,7 +2,7 @@ import uuid from typing import Any, Callable, Optional, cast from langchain.callbacks.manager import CallbackManagerForChainRun -from langchain.schema import AIMessage, HumanMessage +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.prompt_values import ChatPromptValue, StringPromptValue from langchain_experimental.comprehend_moderation.pii import ComprehendPII diff --git a/libs/experimental/langchain_experimental/graph_transformers/diffbot.py b/libs/experimental/langchain_experimental/graph_transformers/diffbot.py index 302a4d1fb1e..58c84094eda 100644 --- a/libs/experimental/langchain_experimental/graph_transformers/diffbot.py +++ b/libs/experimental/langchain_experimental/graph_transformers/diffbot.py @@ -1,9 +1,9 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union import requests -from langchain.schema import Document from langchain.utils import get_from_env from langchain_community.graphs.graph_document import GraphDocument, Node, Relationship +from langchain_core.documents import Document def format_property_key(s: str) -> str: diff --git a/libs/experimental/langchain_experimental/llm_bash/prompt.py b/libs/experimental/langchain_experimental/llm_bash/prompt.py index 72951d2fe9f..3ba55e8e064 100644 --- a/libs/experimental/langchain_experimental/llm_bash/prompt.py +++ b/libs/experimental/langchain_experimental/llm_bash/prompt.py @@ -5,7 +5,8 @@ import re from typing import List from langchain.prompts.prompt import 
PromptTemplate -from langchain.schema import BaseOutputParser, OutputParserException +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.exceptions import OutputParserException _PROMPT_TEMPLATE = """If someone asks you to perform a task, your job is to come up with a series of bash commands that will perform the task. There is no need to put "#!/bin/bash" in your answer. Make sure to reason step by step, using this format: diff --git a/libs/experimental/langchain_experimental/plan_and_execute/schema.py b/libs/experimental/langchain_experimental/plan_and_execute/schema.py index 41a323880a0..2fc2660def1 100644 --- a/libs/experimental/langchain_experimental/plan_and_execute/schema.py +++ b/libs/experimental/langchain_experimental/plan_and_execute/schema.py @@ -1,7 +1,7 @@ from abc import abstractmethod from typing import List, Tuple -from langchain.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser from langchain_experimental.pydantic_v1 import BaseModel, Field diff --git a/libs/experimental/langchain_experimental/retrievers/vector_sql_database.py b/libs/experimental/langchain_experimental/retrievers/vector_sql_database.py index 1ec088dbc51..fdde2da3275 100644 --- a/libs/experimental/langchain_experimental/retrievers/vector_sql_database.py +++ b/libs/experimental/langchain_experimental/retrievers/vector_sql_database.py @@ -1,11 +1,13 @@ """Vector SQL Database Chain Retriever""" + from typing import Any, Dict, List from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, CallbackManagerForRetrieverRun, ) -from langchain.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain diff --git a/libs/experimental/langchain_experimental/sql/vector_sql.py b/libs/experimental/langchain_experimental/sql/vector_sql.py index 
a21a6bae868..07125e1f828 100644 --- a/libs/experimental/langchain_experimental/sql/vector_sql.py +++ b/libs/experimental/langchain_experimental/sql/vector_sql.py @@ -1,4 +1,5 @@ """Vector SQL Database Chain Retriever""" + from __future__ import annotations from typing import Any, Dict, List, Optional, Sequence, Union @@ -7,11 +8,12 @@ from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.llm import LLMChain from langchain.chains.sql_database.prompt import PROMPT, SQL_PROMPTS from langchain.prompts.prompt import PromptTemplate -from langchain.schema import BaseOutputParser, BasePromptTemplate from langchain_community.tools.sql_database.prompt import QUERY_CHECKER from langchain_community.utilities.sql_database import SQLDatabase from langchain_core.embeddings import Embeddings from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts import BasePromptTemplate from langchain_experimental.sql.base import INTERMEDIATE_STEPS_KEY, SQLDatabaseChain diff --git a/libs/experimental/langchain_experimental/tot/prompts.py b/libs/experimental/langchain_experimental/tot/prompts.py index b11e9072a60..a59a2be9e36 100644 --- a/libs/experimental/langchain_experimental/tot/prompts.py +++ b/libs/experimental/langchain_experimental/tot/prompts.py @@ -3,7 +3,7 @@ from textwrap import dedent from typing import List from langchain.prompts import PromptTemplate -from langchain.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser from langchain_experimental.tot.thought import ThoughtValidity diff --git a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_llama2chat.py b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_llama2chat.py index 3135b10745f..abf1129f60d 100644 --- a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_llama2chat.py +++ 
b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_llama2chat.py @@ -5,8 +5,8 @@ from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun, ) -from langchain.schema import AIMessage, HumanMessage, SystemMessage from langchain_core.language_models import LLM +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from langchain_experimental.chat_models import Llama2Chat from langchain_experimental.chat_models.llm_wrapper import DEFAULT_SYSTEM_PROMPT diff --git a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_orca.py b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_orca.py index 96f4c2af918..902d163c379 100644 --- a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_orca.py +++ b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_orca.py @@ -1,5 +1,5 @@ import pytest -from langchain.schema import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from langchain_experimental.chat_models import Orca from tests.unit_tests.chat_models.test_llm_wrapper_llama2chat import FakeLLM diff --git a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_vicuna.py b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_vicuna.py index 21506c3788e..8722b3ec5fc 100644 --- a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_vicuna.py +++ b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_vicuna.py @@ -1,5 +1,5 @@ import pytest -from langchain.schema import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from langchain_experimental.chat_models import Vicuna from tests.unit_tests.chat_models.test_llm_wrapper_llama2chat import FakeLLM diff --git a/templates/cassandra-synonym-caching/cassandra_synonym_caching/__init__.py 
b/templates/cassandra-synonym-caching/cassandra_synonym_caching/__init__.py index 85ee65b8833..04ca550915f 100644 --- a/templates/cassandra-synonym-caching/cassandra_synonym_caching/__init__.py +++ b/templates/cassandra-synonym-caching/cassandra_synonym_caching/__init__.py @@ -3,8 +3,8 @@ import os import cassio import langchain from langchain.cache import CassandraCache -from langchain.schema import BaseMessage from langchain_community.chat_models import ChatOpenAI +from langchain_core.messages import BaseMessage from langchain_core.prompts import ChatPromptTemplate from langchain_core.runnables import RunnableLambda diff --git a/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py b/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py index 0f0f8c45a89..5252e6784b2 100644 --- a/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py +++ b/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py @@ -1,7 +1,7 @@ from langchain import hub -from langchain.schema import StrOutputParser from langchain_community.chat_models import ChatAnthropic from langchain_community.utilities import WikipediaAPIWrapper +from langchain_core.output_parsers import StrOutputParser from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import RunnableLambda, RunnablePassthrough diff --git a/templates/chat-bot-feedback/README.md b/templates/chat-bot-feedback/README.md index f12aef309b1..dd8739b9dc1 100644 --- a/templates/chat-bot-feedback/README.md +++ b/templates/chat-bot-feedback/README.md @@ -69,7 +69,7 @@ from functools import partial from typing import Dict, Optional, Callable, List from langserve import RemoteRunnable from langchain.callbacks.manager import tracing_v2_enabled -from langchain.schema import BaseMessage, AIMessage, HumanMessage +from langchain_core.messages import BaseMessage, AIMessage, HumanMessage # Update with the URL provided by your LangServe server chain = RemoteRunnable("http://127.0.0.1:8031/chat-bot-feedback") diff --git 
a/templates/neo4j-generation/neo4j_generation/chain.py b/templates/neo4j-generation/neo4j_generation/chain.py index aa7ea35f62a..a9dac72d29c 100644 --- a/templates/neo4j-generation/neo4j_generation/chain.py +++ b/templates/neo4j-generation/neo4j_generation/chain.py @@ -3,10 +3,10 @@ from typing import List, Optional from langchain.chains.openai_functions import ( create_structured_output_chain, ) -from langchain.schema import Document from langchain_community.chat_models import ChatOpenAI from langchain_community.graphs import Neo4jGraph from langchain_community.graphs.graph_document import GraphDocument +from langchain_core.documents import Document from langchain_core.prompts import ChatPromptTemplate from neo4j_generation.utils import ( diff --git a/templates/neo4j-semantic-layer/neo4j_semantic_layer/agent.py b/templates/neo4j-semantic-layer/neo4j_semantic_layer/agent.py index 976f27b8ad6..79fbae96b7f 100644 --- a/templates/neo4j-semantic-layer/neo4j_semantic_layer/agent.py +++ b/templates/neo4j-semantic-layer/neo4j_semantic_layer/agent.py @@ -5,9 +5,9 @@ from langchain.agents.format_scratchpad import format_to_openai_function_message from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder from langchain.pydantic_v1 import BaseModel, Field -from langchain.schema import AIMessage, HumanMessage from langchain.tools.render import format_tool_to_openai_function from langchain_community.chat_models import ChatOpenAI +from langchain_core.messages import AIMessage, HumanMessage from neo4j_semantic_layer.information_tool import InformationTool from neo4j_semantic_layer.memory_tool import MemoryTool @@ -45,9 +45,9 @@ def _format_chat_history(chat_history: List[Tuple[str, str]]): agent = ( { "input": lambda x: x["input"], - "chat_history": lambda x: _format_chat_history(x["chat_history"]) - if x.get("chat_history") - else [], + "chat_history": lambda x: ( + 
_format_chat_history(x["chat_history"]) if x.get("chat_history") else [] + ), "agent_scratchpad": lambda x: format_to_openai_function_messages( x["intermediate_steps"] ), diff --git a/templates/neo4j-semantic-ollama/neo4j_semantic_ollama/agent.py b/templates/neo4j-semantic-ollama/neo4j_semantic_ollama/agent.py index ad4010d6cfc..08d8fdc2a17 100644 --- a/templates/neo4j-semantic-ollama/neo4j_semantic_ollama/agent.py +++ b/templates/neo4j-semantic-ollama/neo4j_semantic_ollama/agent.py @@ -8,9 +8,9 @@ from langchain.agents.output_parsers import ( ) from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder from langchain.pydantic_v1 import BaseModel, Field -from langchain.schema import AIMessage, HumanMessage from langchain.tools.render import render_text_description_and_args from langchain_community.chat_models import ChatOllama +from langchain_core.messages import AIMessage, HumanMessage from neo4j_semantic_ollama.information_tool import InformationTool from neo4j_semantic_ollama.memory_tool import MemoryTool @@ -87,9 +87,9 @@ agent = ( { "input": lambda x: x["input"], "agent_scratchpad": lambda x: format_log_to_messages(x["intermediate_steps"]), - "chat_history": lambda x: _format_chat_history(x["chat_history"]) - if x.get("chat_history") - else [], + "chat_history": lambda x: ( + _format_chat_history(x["chat_history"]) if x.get("chat_history") else [] + ), } | prompt | chat_model_with_stop diff --git a/templates/neo4j-vector-memory/neo4j_vector_memory/history.py b/templates/neo4j-vector-memory/neo4j_vector_memory/history.py index 9fccc109784..a88fafc2c7a 100644 --- a/templates/neo4j-vector-memory/neo4j_vector_memory/history.py +++ b/templates/neo4j-vector-memory/neo4j_vector_memory/history.py @@ -1,8 +1,8 @@ from typing import Any, Dict, List, Union from langchain.memory import ChatMessageHistory -from langchain.schema import AIMessage, HumanMessage from langchain_community.graphs import Neo4jGraph +from langchain_core.messages import AIMessage, 
HumanMessage graph = Neo4jGraph() diff --git a/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py b/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py index ba0e34e1ee8..c20f50f2183 100644 --- a/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py +++ b/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py @@ -6,13 +6,13 @@ from langchain.agents import ( ) from langchain.agents.format_scratchpad import format_to_openai_functions from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser -from langchain.schema import Document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.tools.convert_to_openai import format_tool_to_openai_function from langchain_community.tools.tavily_search import TavilySearchResults from langchain_community.utilities.tavily_search import TavilySearchAPIWrapper from langchain_community.vectorstores import FAISS +from langchain_core.documents import Document from langchain_core.messages import AIMessage, HumanMessage from langchain_core.prompts import ( ChatPromptTemplate, diff --git a/templates/rag-conversation-zep/rag_conversation_zep/chain.py b/templates/rag-conversation-zep/rag_conversation_zep/chain.py index 32d8db63da5..1031e2cd453 100644 --- a/templates/rag-conversation-zep/rag_conversation_zep/chain.py +++ b/templates/rag-conversation-zep/rag_conversation_zep/chain.py @@ -2,13 +2,16 @@ import os from operator import itemgetter from typing import List, Tuple -from langchain.schema import AIMessage, HumanMessage, format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.vectorstores.zep import CollectionConfig, ZepVectorStore from langchain_core.documents import Document -from langchain_core.messages import BaseMessage +from 
langchain_core.messages import AIMessage, BaseMessage, HumanMessage from langchain_core.output_parsers import StrOutputParser -from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder +from langchain_core.prompts import ( + ChatPromptTemplate, + MessagesPlaceholder, + format_document, +) from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import ( diff --git a/templates/rag-conversation/rag_conversation/chain.py b/templates/rag-conversation/rag_conversation/chain.py index 29199b2211e..6607e028ec1 100644 --- a/templates/rag-conversation/rag_conversation/chain.py +++ b/templates/rag-conversation/rag_conversation/chain.py @@ -2,11 +2,15 @@ import os from operator import itemgetter from typing import List, Tuple -from langchain.schema import AIMessage, HumanMessage, format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.output_parsers import StrOutputParser -from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder +from langchain_core.prompts import ( + ChatPromptTemplate, + MessagesPlaceholder, + format_document, +) from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import ( diff --git a/templates/rag-elasticsearch/rag_elasticsearch/chain.py b/templates/rag-elasticsearch/rag_elasticsearch/chain.py index a28060e0e6e..778003e7883 100644 --- a/templates/rag-elasticsearch/rag_elasticsearch/chain.py +++ b/templates/rag-elasticsearch/rag_elasticsearch/chain.py @@ -1,11 +1,12 @@ from operator import itemgetter from typing import List, Optional, Tuple -from langchain.schema import BaseMessage, format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import 
HuggingFaceEmbeddings from langchain_community.vectorstores.elasticsearch import ElasticsearchStore +from langchain_core.messages import BaseMessage from langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts import format_document from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import RunnableParallel, RunnablePassthrough diff --git a/templates/rag-gpt-crawler/rag_gpt_crawler/chain.py b/templates/rag-gpt-crawler/rag_gpt_crawler/chain.py index 6fce7456746..3e322446b67 100644 --- a/templates/rag-gpt-crawler/rag_gpt_crawler/chain.py +++ b/templates/rag-gpt-crawler/rag_gpt_crawler/chain.py @@ -1,11 +1,11 @@ import json from pathlib import Path -from langchain.schema import Document from langchain.text_splitter import RecursiveCharacterTextSplitter from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.vectorstores import Chroma +from langchain_core.documents import Document from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel diff --git a/templates/rag-multi-index-fusion/rag_multi_index_fusion/chain.py b/templates/rag-multi-index-fusion/rag_multi_index_fusion/chain.py index 7c059ff9008..48a42d088ac 100644 --- a/templates/rag-multi-index-fusion/rag_multi_index_fusion/chain.py +++ b/templates/rag-multi-index-fusion/rag_multi_index_fusion/chain.py @@ -7,10 +7,10 @@ from langchain.retrievers import ( PubMedRetriever, WikipediaRetriever, ) -from langchain.schema import StrOutputParser from langchain.utils.math import cosine_similarity from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings +from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel from 
langchain_core.runnables import ( diff --git a/templates/rag-multi-index-router/rag_multi_index_router/chain.py b/templates/rag-multi-index-router/rag_multi_index_router/chain.py index 14262131139..fddc1abeb03 100644 --- a/templates/rag-multi-index-router/rag_multi_index_router/chain.py +++ b/templates/rag-multi-index-router/rag_multi_index_router/chain.py @@ -8,9 +8,9 @@ from langchain.retrievers import ( PubMedRetriever, WikipediaRetriever, ) -from langchain.schema import StrOutputParser from langchain.utils.openai_functions import convert_pydantic_to_openai_function from langchain_community.chat_models import ChatOpenAI +from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import ( diff --git a/templates/rag-self-query/rag_self_query/chain.py b/templates/rag-self-query/rag_self_query/chain.py index 340ec888869..b0526ef8900 100644 --- a/templates/rag-self-query/rag_self_query/chain.py +++ b/templates/rag-self-query/rag_self_query/chain.py @@ -3,11 +3,11 @@ from operator import itemgetter from typing import List, Tuple from langchain.retrievers import SelfQueryRetriever -from langchain.schema import format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.vectorstores.elasticsearch import ElasticsearchStore from langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts import format_document from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import RunnableParallel, RunnablePassthrough diff --git a/templates/rag-timescale-conversation/rag_timescale_conversation/chain.py b/templates/rag-timescale-conversation/rag_timescale_conversation/chain.py index 0efa9f851dc..1456e67302c 100644 --- a/templates/rag-timescale-conversation/rag_timescale_conversation/chain.py +++ 
b/templates/rag-timescale-conversation/rag_timescale_conversation/chain.py @@ -4,12 +4,16 @@ from operator import itemgetter from typing import List, Optional, Tuple from dotenv import find_dotenv, load_dotenv -from langchain.schema import AIMessage, HumanMessage, format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.vectorstores.timescalevector import TimescaleVector +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.output_parsers import StrOutputParser -from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder +from langchain_core.prompts import ( + ChatPromptTemplate, + MessagesPlaceholder, + format_document, +) from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import ( @@ -147,12 +151,16 @@ _inputs = RunnableParallel( ) _datetime_to_string = RunnablePassthrough.assign( - start_date=lambda x: x.get("start_date", None).isoformat() - if x.get("start_date", None) is not None - else None, - end_date=lambda x: x.get("end_date", None).isoformat() - if x.get("end_date", None) is not None - else None, + start_date=lambda x: ( + x.get("start_date", None).isoformat() + if x.get("start_date", None) is not None + else None + ), + end_date=lambda x: ( + x.get("end_date", None).isoformat() + if x.get("end_date", None) is not None + else None + ), ).with_types(input_type=ChatHistory) chain = ( diff --git a/templates/retrieval-agent-fireworks/retrieval_agent_fireworks/chain.py b/templates/retrieval-agent-fireworks/retrieval_agent_fireworks/chain.py index 7207cc99d32..6fef1ba1a97 100644 --- a/templates/retrieval-agent-fireworks/retrieval_agent_fireworks/chain.py +++ b/templates/retrieval-agent-fireworks/retrieval_agent_fireworks/chain.py @@ -5,12 +5,13 @@ from langchain.agents import AgentExecutor from langchain.agents.format_scratchpad 
import format_log_to_str from langchain.agents.output_parsers import ReActJsonSingleInputOutputParser from langchain.callbacks.manager import CallbackManagerForRetrieverRun -from langchain.schema import BaseRetriever, Document from langchain.tools.render import render_text_description from langchain.tools.retriever import create_retriever_tool from langchain_community.chat_models.fireworks import ChatFireworks from langchain_community.utilities.arxiv import ArxivAPIWrapper +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel +from langchain_core.retrievers import BaseRetriever MODEL_ID = "accounts/fireworks/models/mixtral-8x7b-instruct" diff --git a/templates/retrieval-agent/retrieval_agent/chain.py b/templates/retrieval-agent/retrieval_agent/chain.py index 2f774f6bfe3..f74e37d71b6 100644 --- a/templates/retrieval-agent/retrieval_agent/chain.py +++ b/templates/retrieval-agent/retrieval_agent/chain.py @@ -5,13 +5,14 @@ from langchain.agents import AgentExecutor from langchain.agents.format_scratchpad import format_to_openai_function_messages from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser from langchain.callbacks.manager import CallbackManagerForRetrieverRun -from langchain.schema import BaseRetriever, Document from langchain.tools.retriever import create_retriever_tool from langchain_community.tools.convert_to_openai import format_tool_to_openai_function from langchain_community.utilities.arxiv import ArxivAPIWrapper +from langchain_core.documents import Document from langchain_core.messages import AIMessage, HumanMessage from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder from langchain_core.pydantic_v1 import BaseModel, Field +from langchain_core.retrievers import BaseRetriever from langchain_openai import AzureChatOpenAI diff --git a/templates/self-query-qdrant/README.md b/templates/self-query-qdrant/README.md index c32af05dac6..bbb0f7fccd6 100644 --- 
a/templates/self-query-qdrant/README.md +++ b/templates/self-query-qdrant/README.md @@ -63,7 +63,7 @@ You can find the documents in the `packages/self-query-qdrant/self_query_qdrant/ Here is one of the documents: ```python -from langchain.schema import Document +from langchain_core.documents import Document Document( page_content="Spaghetti with meatballs and tomato sauce", @@ -108,7 +108,7 @@ chain = create_chain( The same goes for the `initialize` function that creates a Qdrant collection and indexes the documents: ```python -from langchain.schema import Document +from langchain_core.documents import Document from langchain_community.embeddings import HuggingFaceEmbeddings from self_query_qdrant.chain import initialize diff --git a/templates/self-query-qdrant/self_query_qdrant/chain.py b/templates/self-query-qdrant/self_query_qdrant/chain.py index 799d8816b86..ccc30be6361 100644 --- a/templates/self-query-qdrant/self_query_qdrant/chain.py +++ b/templates/self-query-qdrant/self_query_qdrant/chain.py @@ -3,11 +3,12 @@ from typing import List, Optional from langchain.chains.query_constructor.schema import AttributeInfo from langchain.retrievers import SelfQueryRetriever -from langchain.schema import Document, StrOutputParser from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.llms import BaseLLM from langchain_community.llms.openai import OpenAI from langchain_community.vectorstores.qdrant import Qdrant +from langchain_core.documents import Document +from langchain_core.output_parsers import StrOutputParser from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import RunnableParallel, RunnablePassthrough diff --git a/templates/self-query-qdrant/self_query_qdrant/defaults.py b/templates/self-query-qdrant/self_query_qdrant/defaults.py index ec0f90b1318..84dfb98560f 100644 --- a/templates/self-query-qdrant/self_query_qdrant/defaults.py +++ b/templates/self-query-qdrant/self_query_qdrant/defaults.py @@
-1,5 +1,5 @@ from langchain.chains.query_constructor.schema import AttributeInfo -from langchain.schema import Document +from langchain_core.documents import Document # Qdrant collection name DEFAULT_COLLECTION_NAME = "restaurants" diff --git a/templates/self-query-qdrant/self_query_qdrant/helper.py b/templates/self-query-qdrant/self_query_qdrant/helper.py index 8c9074b8cf8..ab3156efb77 100644 --- a/templates/self-query-qdrant/self_query_qdrant/helper.py +++ b/templates/self-query-qdrant/self_query_qdrant/helper.py @@ -1,7 +1,7 @@ from string import Formatter from typing import List -from langchain.schema import Document +from langchain_core.documents import Document document_template = """ PASSAGE: {page_content} diff --git a/templates/solo-performance-prompting-agent/solo_performance_prompting_agent/parser.py b/templates/solo-performance-prompting-agent/solo_performance_prompting_agent/parser.py index f81c61063fa..0fceca50947 100644 --- a/templates/solo-performance-prompting-agent/solo_performance_prompting_agent/parser.py +++ b/templates/solo-performance-prompting-agent/solo_performance_prompting_agent/parser.py @@ -1,4 +1,4 @@ -from langchain.schema import AgentAction, AgentFinish +from langchain_core.agents import AgentAction, AgentFinish def parse_output(message: str): diff --git a/templates/xml-agent/xml_agent/agent.py b/templates/xml-agent/xml_agent/agent.py index 6759fa91985..13345e32407 100644 --- a/templates/xml-agent/xml_agent/agent.py +++ b/templates/xml-agent/xml_agent/agent.py @@ -2,10 +2,10 @@ from typing import List, Tuple from langchain.agents import AgentExecutor from langchain.agents.format_scratchpad import format_xml -from langchain.schema import AIMessage, HumanMessage from langchain.tools import DuckDuckGoSearchRun from langchain.tools.render import render_text_description from langchain_community.chat_models import ChatAnthropic +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.pydantic_v1 import BaseModel, 
Field from xml_agent.prompts import conversational_prompt, parse_output diff --git a/templates/xml-agent/xml_agent/prompts.py b/templates/xml-agent/xml_agent/prompts.py index 652f8cf6dd8..d39e0908858 100644 --- a/templates/xml-agent/xml_agent/prompts.py +++ b/templates/xml-agent/xml_agent/prompts.py @@ -1,4 +1,4 @@ -from langchain.schema import AgentAction, AgentFinish +from langchain_core.agents import AgentAction, AgentFinish from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder template = """You are a helpful assistant. Help the user answer any questions.