From 27a4682415fb93dd5880213d36a957d336bec7c0 Mon Sep 17 00:00:00 2001 From: Leonid Ganeline Date: Thu, 18 Apr 2024 23:04:07 -0700 Subject: [PATCH] docs: imports update (#20625) Updated imports in docs Co-authored-by: Erick Friis --- docs/docs/expression_language/primitives/configure.ipynb | 4 ++-- .../safety/presidio_data_anonymization/index.ipynb | 2 +- .../presidio_data_anonymization/qa_privacy_protection.ipynb | 2 +- .../safety/presidio_data_anonymization/reversible.ipynb | 2 +- docs/docs/integrations/callbacks/fiddler.ipynb | 2 +- docs/docs/integrations/chat/maritalk.ipynb | 2 +- docs/docs/integrations/graphs/memgraph.ipynb | 2 +- docs/docs/integrations/graphs/neo4j_cypher.ipynb | 2 +- .../integrations/llms/alibabacloud_pai_eas_endpoint.ipynb | 2 +- docs/docs/integrations/llms/ibm_watsonx.ipynb | 2 +- docs/docs/integrations/llms/mlx_pipelines.ipynb | 2 +- docs/docs/integrations/llms/openvino.ipynb | 2 +- docs/docs/integrations/llms/solar.ipynb | 2 +- docs/docs/integrations/llms/weight_only_quantization.ipynb | 2 +- docs/docs/integrations/vectorstores/documentdb.ipynb | 2 +- docs/docs/integrations/vectorstores/sap_hanavector.ipynb | 2 +- .../model_io/prompts/example_selectors/length_based.ipynb | 4 ++-- .../modules/model_io/prompts/example_selectors/mmr.ipynb | 6 +++--- .../model_io/prompts/example_selectors/ngram_overlap.ipynb | 6 ++++-- .../model_io/prompts/example_selectors/similarity.ipynb | 4 ++-- docs/docs/modules/model_io/quick_start.mdx | 2 +- docs/docs/use_cases/graph/semantic.ipynb | 2 +- 22 files changed, 30 insertions(+), 28 deletions(-) diff --git a/docs/docs/expression_language/primitives/configure.ipynb b/docs/docs/expression_language/primitives/configure.ipynb index f5e04a30410..822131c85eb 100644 --- a/docs/docs/expression_language/primitives/configure.ipynb +++ b/docs/docs/expression_language/primitives/configure.ipynb @@ -63,7 +63,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import PromptTemplate\n", + "from 
langchain_core.prompts import PromptTemplate\n", "from langchain_core.runnables import ConfigurableField\n", "from langchain_openai import ChatOpenAI\n", "\n", @@ -285,8 +285,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import PromptTemplate\n", "from langchain_community.chat_models import ChatAnthropic\n", + "from langchain_core.prompts import PromptTemplate\n", "from langchain_core.runnables import ConfigurableField\n", "from langchain_openai import ChatOpenAI" ] diff --git a/docs/docs/guides/productionization/safety/presidio_data_anonymization/index.ipynb b/docs/docs/guides/productionization/safety/presidio_data_anonymization/index.ipynb index 1ec5b2a3ae6..e1b85e0d942 100644 --- a/docs/docs/guides/productionization/safety/presidio_data_anonymization/index.ipynb +++ b/docs/docs/guides/productionization/safety/presidio_data_anonymization/index.ipynb @@ -137,7 +137,7 @@ } ], "source": [ - "from langchain.prompts.prompt import PromptTemplate\n", + "from langchain_core.prompts.prompt import PromptTemplate\n", "from langchain_openai import ChatOpenAI\n", "\n", "anonymizer = PresidioAnonymizer()\n", diff --git a/docs/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection.ipynb b/docs/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection.ipynb index 0791996598b..76cc5b035da 100644 --- a/docs/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection.ipynb +++ b/docs/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection.ipynb @@ -878,8 +878,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts.prompt import PromptTemplate\n", "from langchain_core.prompts import format_document\n", + "from langchain_core.prompts.prompt import PromptTemplate\n", "\n", "DEFAULT_DOCUMENT_PROMPT = PromptTemplate.from_template(template=\"{page_content}\")\n", "\n", diff --git 
a/docs/docs/guides/productionization/safety/presidio_data_anonymization/reversible.ipynb b/docs/docs/guides/productionization/safety/presidio_data_anonymization/reversible.ipynb index 87c5a444e1b..770d68b1b88 100644 --- a/docs/docs/guides/productionization/safety/presidio_data_anonymization/reversible.ipynb +++ b/docs/docs/guides/productionization/safety/presidio_data_anonymization/reversible.ipynb @@ -207,7 +207,7 @@ } ], "source": [ - "from langchain.prompts.prompt import PromptTemplate\n", + "from langchain_core.prompts.prompt import PromptTemplate\n", "from langchain_openai import ChatOpenAI\n", "\n", "anonymizer = PresidioReversibleAnonymizer()\n", diff --git a/docs/docs/integrations/callbacks/fiddler.ipynb b/docs/docs/integrations/callbacks/fiddler.ipynb index 55d246aa91d..0a2ab4e683f 100644 --- a/docs/docs/integrations/callbacks/fiddler.ipynb +++ b/docs/docs/integrations/callbacks/fiddler.ipynb @@ -151,7 +151,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import (\n", + "from langchain_core.prompts import (\n", " ChatPromptTemplate,\n", " FewShotChatMessagePromptTemplate,\n", ")\n", diff --git a/docs/docs/integrations/chat/maritalk.ipynb b/docs/docs/integrations/chat/maritalk.ipynb index 5ae77c16242..8518ad23b3c 100644 --- a/docs/docs/integrations/chat/maritalk.ipynb +++ b/docs/docs/integrations/chat/maritalk.ipynb @@ -60,9 +60,9 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts.chat import ChatPromptTemplate\n", "from langchain_community.chat_models import ChatMaritalk\n", "from langchain_core.output_parsers import StrOutputParser\n", + "from langchain_core.prompts.chat import ChatPromptTemplate\n", "\n", "llm = ChatMaritalk(\n", " model=\"sabia-2-medium\", # Available models: sabia-2-small and sabia-2-medium\n", diff --git a/docs/docs/integrations/graphs/memgraph.ipynb b/docs/docs/integrations/graphs/memgraph.ipynb index c11677b479f..937bb4bbc8a 100644 --- a/docs/docs/integrations/graphs/memgraph.ipynb +++ 
b/docs/docs/integrations/graphs/memgraph.ipynb @@ -79,8 +79,8 @@ "\n", "from gqlalchemy import Memgraph\n", "from langchain.chains import GraphCypherQAChain\n", - "from langchain.prompts import PromptTemplate\n", "from langchain_community.graphs import MemgraphGraph\n", + "from langchain_core.prompts import PromptTemplate\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/graphs/neo4j_cypher.ipynb b/docs/docs/integrations/graphs/neo4j_cypher.ipynb index 6855abe08a4..7b1b854ea05 100644 --- a/docs/docs/integrations/graphs/neo4j_cypher.ipynb +++ b/docs/docs/integrations/graphs/neo4j_cypher.ipynb @@ -389,7 +389,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts.prompt import PromptTemplate\n", + "from langchain_core.prompts.prompt import PromptTemplate\n", "\n", "CYPHER_GENERATION_TEMPLATE = \"\"\"Task:Generate Cypher statement to query a graph database.\n", "Instructions:\n", diff --git a/docs/docs/integrations/llms/alibabacloud_pai_eas_endpoint.ipynb b/docs/docs/integrations/llms/alibabacloud_pai_eas_endpoint.ipynb index 57f98a9f7bb..f10c07770c8 100644 --- a/docs/docs/integrations/llms/alibabacloud_pai_eas_endpoint.ipynb +++ b/docs/docs/integrations/llms/alibabacloud_pai_eas_endpoint.ipynb @@ -16,8 +16,8 @@ "outputs": [], "source": [ "from langchain.chains import LLMChain\n", - "from langchain.prompts import PromptTemplate\n", "from langchain_community.llms.pai_eas_endpoint import PaiEasEndpoint\n", + "from langchain_core.prompts import PromptTemplate\n", "\n", "template = \"\"\"Question: {question}\n", "\n", diff --git a/docs/docs/integrations/llms/ibm_watsonx.ipynb b/docs/docs/integrations/llms/ibm_watsonx.ipynb index 7a0d65d91c1..0a0168a53fd 100644 --- a/docs/docs/integrations/llms/ibm_watsonx.ipynb +++ b/docs/docs/integrations/llms/ibm_watsonx.ipynb @@ -210,7 +210,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import PromptTemplate\n", + "from langchain_core.prompts import 
PromptTemplate\n", "\n", "template = \"Generate a random question about {topic}: Question: \"\n", "prompt = PromptTemplate.from_template(template)" diff --git a/docs/docs/integrations/llms/mlx_pipelines.ipynb b/docs/docs/integrations/llms/mlx_pipelines.ipynb index 7ea22f1df30..c6c4b8a7446 100644 --- a/docs/docs/integrations/llms/mlx_pipelines.ipynb +++ b/docs/docs/integrations/llms/mlx_pipelines.ipynb @@ -103,7 +103,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import PromptTemplate\n", + "from langchain_core.prompts import PromptTemplate\n", "\n", "template = \"\"\"Question: {question}\n", "\n", diff --git a/docs/docs/integrations/llms/openvino.ipynb b/docs/docs/integrations/llms/openvino.ipynb index b3dd8d43e9c..2c30e0510d0 100644 --- a/docs/docs/integrations/llms/openvino.ipynb +++ b/docs/docs/integrations/llms/openvino.ipynb @@ -116,7 +116,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import PromptTemplate\n", + "from langchain_core.prompts import PromptTemplate\n", "\n", "template = \"\"\"Question: {question}\n", "\n", diff --git a/docs/docs/integrations/llms/solar.ipynb b/docs/docs/integrations/llms/solar.ipynb index b2682e2d288..a5d71bc4646 100644 --- a/docs/docs/integrations/llms/solar.ipynb +++ b/docs/docs/integrations/llms/solar.ipynb @@ -31,8 +31,8 @@ "outputs": [], "source": [ "from langchain.chains import LLMChain\n", - "from langchain.prompts import PromptTemplate\n", "from langchain_community.llms.solar import Solar\n", + "from langchain_core.prompts import PromptTemplate\n", "\n", "template = \"\"\"Question: {question}\n", "\n", diff --git a/docs/docs/integrations/llms/weight_only_quantization.ipynb b/docs/docs/integrations/llms/weight_only_quantization.ipynb index 45ea73ef0da..a3a215ee398 100644 --- a/docs/docs/integrations/llms/weight_only_quantization.ipynb +++ b/docs/docs/integrations/llms/weight_only_quantization.ipynb @@ -115,7 +115,7 @@ "metadata": {}, "outputs": [], "source": [ - "from 
langchain.prompts import PromptTemplate\n", + "from langchain_core.prompts import PromptTemplate\n", "\n", "template = \"\"\"Question: {question}\n", "\n", diff --git a/docs/docs/integrations/vectorstores/documentdb.ipynb b/docs/docs/integrations/vectorstores/documentdb.ipynb index bce6e0bd213..e39e3276481 100644 --- a/docs/docs/integrations/vectorstores/documentdb.ipynb +++ b/docs/docs/integrations/vectorstores/documentdb.ipynb @@ -415,7 +415,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import PromptTemplate\n", + "from langchain_core.prompts import PromptTemplate\n", "\n", "prompt_template = \"\"\"Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.\n", "\n", diff --git a/docs/docs/integrations/vectorstores/sap_hanavector.ipynb b/docs/docs/integrations/vectorstores/sap_hanavector.ipynb index d9437fef1df..42e89eb21f5 100644 --- a/docs/docs/integrations/vectorstores/sap_hanavector.ipynb +++ b/docs/docs/integrations/vectorstores/sap_hanavector.ipynb @@ -403,7 +403,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import PromptTemplate\n", + "from langchain_core.prompts import PromptTemplate\n", "\n", "prompt_template = \"\"\"\n", "You are an expert in state of the union topics. 
You are provided multiple context items that are related to the prompt you have to answer.\n", diff --git a/docs/docs/modules/model_io/prompts/example_selectors/length_based.ipynb b/docs/docs/modules/model_io/prompts/example_selectors/length_based.ipynb index af1f9339d6e..7680b26d68b 100644 --- a/docs/docs/modules/model_io/prompts/example_selectors/length_based.ipynb +++ b/docs/docs/modules/model_io/prompts/example_selectors/length_based.ipynb @@ -17,8 +17,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import FewShotPromptTemplate, PromptTemplate\n", - "from langchain.prompts.example_selector import LengthBasedExampleSelector\n", + "from langchain_core.example_selectors import LengthBasedExampleSelector\n", + "from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate\n", "\n", "# Examples of a pretend task of creating antonyms.\n", "examples = [\n", diff --git a/docs/docs/modules/model_io/prompts/example_selectors/mmr.ipynb b/docs/docs/modules/model_io/prompts/example_selectors/mmr.ipynb index 4a56b13d839..e9f7aa73b13 100644 --- a/docs/docs/modules/model_io/prompts/example_selectors/mmr.ipynb +++ b/docs/docs/modules/model_io/prompts/example_selectors/mmr.ipynb @@ -17,12 +17,12 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import FewShotPromptTemplate, PromptTemplate\n", - "from langchain.prompts.example_selector import (\n", + "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.example_selectors import (\n", " MaxMarginalRelevanceExampleSelector,\n", " SemanticSimilarityExampleSelector,\n", ")\n", - "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "example_prompt = PromptTemplate(\n", diff --git a/docs/docs/modules/model_io/prompts/example_selectors/ngram_overlap.ipynb 
b/docs/docs/modules/model_io/prompts/example_selectors/ngram_overlap.ipynb index 12d07d60cd9..2157b23f501 100644 --- a/docs/docs/modules/model_io/prompts/example_selectors/ngram_overlap.ipynb +++ b/docs/docs/modules/model_io/prompts/example_selectors/ngram_overlap.ipynb @@ -19,8 +19,10 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import FewShotPromptTemplate, PromptTemplate\n", - "from langchain.prompts.example_selector.ngram_overlap import NGramOverlapExampleSelector\n", + "from langchain_community.example_selectors.ngram_overlap import (\n", + "    NGramOverlapExampleSelector,\n", + ")\n", + "from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate\n", "\n", "example_prompt = PromptTemplate(\n", "    input_variables=[\"input\", \"output\"],\n", diff --git a/docs/docs/modules/model_io/prompts/example_selectors/similarity.ipynb b/docs/docs/modules/model_io/prompts/example_selectors/similarity.ipynb index 733e2ad74b4..39e5504a77e 100644 --- a/docs/docs/modules/model_io/prompts/example_selectors/similarity.ipynb +++ b/docs/docs/modules/model_io/prompts/example_selectors/similarity.ipynb @@ -17,9 +17,9 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import FewShotPromptTemplate, PromptTemplate\n", - "from langchain.prompts.example_selector import SemanticSimilarityExampleSelector\n", "from langchain_chroma import Chroma\n", + "from langchain_core.example_selectors import SemanticSimilarityExampleSelector\n", + "from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "example_prompt = PromptTemplate(\n", diff --git a/docs/docs/modules/model_io/quick_start.mdx b/docs/docs/modules/model_io/quick_start.mdx index 6764b75e84a..8fcaf2f42be 100644 --- a/docs/docs/modules/model_io/quick_start.mdx +++ b/docs/docs/modules/model_io/quick_start.mdx @@ -183,7 +183,7 @@ Each `ChatMessageTemplate` contains instructions for how to format that 
`ChatMes Let's take a look at this below: ```python -from langchain.prompts.chat import ChatPromptTemplate +from langchain_core.prompts.chat import ChatPromptTemplate template = "You are a helpful assistant that translates {input_language} to {output_language}." human_template = "{text}" diff --git a/docs/docs/use_cases/graph/semantic.ipynb b/docs/docs/use_cases/graph/semantic.ipynb index aed2af7d19a..f4acb573545 100644 --- a/docs/docs/use_cases/graph/semantic.ipynb +++ b/docs/docs/use_cases/graph/semantic.ipynb @@ -287,8 +287,8 @@ "from langchain.agents import AgentExecutor\n", "from langchain.agents.format_scratchpad import format_to_openai_function_messages\n", "from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser\n", - "from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder\n", "from langchain_core.messages import AIMessage, HumanMessage\n", + "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n", "from langchain_core.utils.function_calling import convert_to_openai_function\n", "from langchain_openai import ChatOpenAI\n", "\n",