diff --git a/cookbook/LLaMA2_sql_chat.ipynb b/cookbook/LLaMA2_sql_chat.ipynb index edd98ce44a8..f31a7fd58f4 100644 --- a/cookbook/LLaMA2_sql_chat.ipynb +++ b/cookbook/LLaMA2_sql_chat.ipynb @@ -107,7 +107,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import SQLDatabase\n", + "from langchain_community.utilities import SQLDatabase\n", "\n", "db = SQLDatabase.from_uri(\"sqlite:///nba_roster.db\", sample_rows_in_table_info=0)\n", "\n", diff --git a/cookbook/autogpt/autogpt.ipynb b/cookbook/autogpt/autogpt.ipynb index 0fcf1fe4c7b..911c8c3c86c 100644 --- a/cookbook/autogpt/autogpt.ipynb +++ b/cookbook/autogpt/autogpt.ipynb @@ -28,9 +28,9 @@ "outputs": [], "source": [ "from langchain.agents import Tool\n", - "from langchain.utilities import SerpAPIWrapper\n", "from langchain_community.tools.file_management.read import ReadFileTool\n", "from langchain_community.tools.file_management.write import WriteFileTool\n", + "from langchain_community.utilities import SerpAPIWrapper\n", "\n", "search = SerpAPIWrapper()\n", "tools = [\n", diff --git a/cookbook/baby_agi_with_agent.ipynb b/cookbook/baby_agi_with_agent.ipynb index 5f55b1ce7d9..393b026b87b 100644 --- a/cookbook/baby_agi_with_agent.ipynb +++ b/cookbook/baby_agi_with_agent.ipynb @@ -108,8 +108,8 @@ "source": [ "from langchain.agents import AgentExecutor, Tool, ZeroShotAgent\n", "from langchain.chains import LLMChain\n", - "from langchain.utilities import SerpAPIWrapper\n", "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import SerpAPIWrapper\n", "\n", "todo_prompt = PromptTemplate.from_template(\n", " \"You are a planner who is an expert at coming up with a todo list for a given objective. 
Come up with a todo list for this objective: {objective}\"\n", diff --git a/cookbook/custom_agent_with_tool_retrieval.ipynb b/cookbook/custom_agent_with_tool_retrieval.ipynb index 5e5c08d3f38..28932f59b22 100644 --- a/cookbook/custom_agent_with_tool_retrieval.ipynb +++ b/cookbook/custom_agent_with_tool_retrieval.ipynb @@ -41,8 +41,8 @@ "from langchain.chains import LLMChain\n", "from langchain.prompts import StringPromptTemplate\n", "from langchain.schema import AgentAction, AgentFinish\n", - "from langchain.utilities import SerpAPIWrapper\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import SerpAPIWrapper" ] }, { diff --git a/cookbook/custom_multi_action_agent.ipynb b/cookbook/custom_multi_action_agent.ipynb index bfe084a280e..271c4c0d816 100644 --- a/cookbook/custom_multi_action_agent.ipynb +++ b/cookbook/custom_multi_action_agent.ipynb @@ -26,7 +26,7 @@ "outputs": [], "source": [ "from langchain.agents import AgentExecutor, BaseMultiActionAgent, Tool\n", - "from langchain.utilities import SerpAPIWrapper" + "from langchain_community.utilities import SerpAPIWrapper" ] }, { diff --git a/cookbook/databricks_sql_db.ipynb b/cookbook/databricks_sql_db.ipynb index 85e04c85653..6cb5da4fe9e 100644 --- a/cookbook/databricks_sql_db.ipynb +++ b/cookbook/databricks_sql_db.ipynb @@ -80,7 +80,7 @@ "outputs": [], "source": [ "# Connecting to Databricks with SQLDatabase wrapper\n", - "from langchain.utilities import SQLDatabase\n", + "from langchain_community.utilities import SQLDatabase\n", "\n", "db = SQLDatabase.from_databricks(catalog=\"samples\", schema=\"nyctaxi\")" ] @@ -115,7 +115,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import SQLDatabaseChain\n", + "from langchain_experimental.sql import SQLDatabaseChain\n", "\n", "db_chain = SQLDatabaseChain.from_llm(llm, db, verbose=True)" ] diff --git 
a/cookbook/forward_looking_retrieval_augmented_generation.ipynb b/cookbook/forward_looking_retrieval_augmented_generation.ipynb index d6cacd0a3f2..e7b3ecda227 100644 --- a/cookbook/forward_looking_retrieval_augmented_generation.ipynb +++ b/cookbook/forward_looking_retrieval_augmented_generation.ipynb @@ -74,9 +74,9 @@ " CallbackManagerForRetrieverRun,\n", ")\n", "from langchain.schema import BaseRetriever, Document\n", - "from langchain.utilities import GoogleSerperAPIWrapper\n", "from langchain_community.chat_models import ChatOpenAI\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import GoogleSerperAPIWrapper" ] }, { diff --git a/cookbook/myscale_vector_sql.ipynb b/cookbook/myscale_vector_sql.ipynb index b2d9feefb17..b02a19f7239 100644 --- a/cookbook/myscale_vector_sql.ipynb +++ b/cookbook/myscale_vector_sql.ipynb @@ -32,8 +32,8 @@ "\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import PromptTemplate\n", - "from langchain.utilities import SQLDatabase\n", "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import SQLDatabase\n", "from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n", "from sqlalchemy import MetaData, create_engine\n", "\n", @@ -75,8 +75,8 @@ "outputs": [], "source": [ "from langchain.callbacks import StdOutCallbackHandler\n", - "from langchain.utilities.sql_database import SQLDatabase\n", "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities.sql_database import SQLDatabase\n", "from langchain_experimental.sql.prompt import MYSCALE_PROMPT\n", "from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n", "\n", diff --git a/cookbook/plan_and_execute_agent.ipynb b/cookbook/plan_and_execute_agent.ipynb index 50e5b1ab65f..2bbdcc6bb01 100644 --- a/cookbook/plan_and_execute_agent.ipynb +++ b/cookbook/plan_and_execute_agent.ipynb @@ 
-30,9 +30,9 @@ "outputs": [], "source": [ "from langchain.chains import LLMMathChain\n", - "from langchain.utilities import DuckDuckGoSearchAPIWrapper\n", "from langchain_community.chat_models import ChatOpenAI\n", "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import DuckDuckGoSearchAPIWrapper\n", "from langchain_core.tools import Tool\n", "from langchain_experimental.plan_and_execute import (\n", " PlanAndExecute,\n", diff --git a/cookbook/rewrite.ipynb b/cookbook/rewrite.ipynb index 69318f55cf7..b60ee96b95b 100644 --- a/cookbook/rewrite.ipynb +++ b/cookbook/rewrite.ipynb @@ -32,8 +32,8 @@ "outputs": [], "source": [ "from langchain.prompts import ChatPromptTemplate\n", - "from langchain.utilities import DuckDuckGoSearchAPIWrapper\n", "from langchain_community.chat_models import ChatOpenAI\n", + "from langchain_community.utilities import DuckDuckGoSearchAPIWrapper\n", "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.runnables import RunnablePassthrough" ] diff --git a/cookbook/sharedmemory_for_tools.ipynb b/cookbook/sharedmemory_for_tools.ipynb index 1624199fe2f..9134a263aee 100644 --- a/cookbook/sharedmemory_for_tools.ipynb +++ b/cookbook/sharedmemory_for_tools.ipynb @@ -26,8 +26,8 @@ "from langchain.chains import LLMChain\n", "from langchain.memory import ConversationBufferMemory, ReadOnlySharedMemory\n", "from langchain.prompts import PromptTemplate\n", - "from langchain.utilities import GoogleSearchAPIWrapper\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import GoogleSearchAPIWrapper" ] }, { diff --git a/cookbook/sql_db_qa.mdx b/cookbook/sql_db_qa.mdx index c3299c38882..851fae5202b 100644 --- a/cookbook/sql_db_qa.mdx +++ b/cookbook/sql_db_qa.mdx @@ -10,7 +10,7 @@ To set it up, follow the instructions on https://database.guide/2-sample-databas ```python from langchain_community.llms import 
OpenAI -from langchain.utilities import SQLDatabase +from langchain_community.utilities import SQLDatabase from langchain_experimental.sql import SQLDatabaseChain ``` @@ -201,7 +201,7 @@ How to add memory to a SQLDatabaseChain: ```python from langchain_community.llms import OpenAI -from langchain.utilities import SQLDatabase +from langchain_community.utilities import SQLDatabase from langchain_experimental.sql import SQLDatabaseChain ``` @@ -679,7 +679,7 @@ local_llm = HuggingFacePipeline(pipeline=pipe) ```python -from langchain.utilities import SQLDatabase +from langchain_community.utilities import SQLDatabase from langchain_experimental.sql import SQLDatabaseChain db = SQLDatabase.from_uri("sqlite:///../../../../notebooks/Chinook.db", include_tables=['Customer']) diff --git a/cookbook/stepback-qa.ipynb b/cookbook/stepback-qa.ipynb index 60a9252b637..e06652e80ed 100644 --- a/cookbook/stepback-qa.ipynb +++ b/cookbook/stepback-qa.ipynb @@ -129,7 +129,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import DuckDuckGoSearchAPIWrapper\n", + "from langchain_community.utilities import DuckDuckGoSearchAPIWrapper\n", "\n", "search = DuckDuckGoSearchAPIWrapper(max_results=4)\n", "\n", diff --git a/docs/docs/expression_language/cookbook/prompt_size.ipynb b/docs/docs/expression_language/cookbook/prompt_size.ipynb index 79cbcc3d0cd..b19f027829f 100644 --- a/docs/docs/expression_language/cookbook/prompt_size.ipynb +++ b/docs/docs/expression_language/cookbook/prompt_size.ipynb @@ -29,9 +29,9 @@ "from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder\n", "from langchain.prompts.chat import ChatPromptValue\n", "from langchain.tools import WikipediaQueryRun\n", - "from langchain.utilities import WikipediaAPIWrapper\n", "from langchain_community.chat_models import ChatOpenAI\n", - "from langchain_community.tools.convert_to_openai import format_tool_to_openai_function" + "from langchain_community.tools.convert_to_openai import 
format_tool_to_openai_function\n", + "from langchain_community.utilities import WikipediaAPIWrapper" ] }, { diff --git a/docs/docs/expression_language/cookbook/sql_db.ipynb b/docs/docs/expression_language/cookbook/sql_db.ipynb index 5dc2a31c77b..039d2f26ba0 100644 --- a/docs/docs/expression_language/cookbook/sql_db.ipynb +++ b/docs/docs/expression_language/cookbook/sql_db.ipynb @@ -43,7 +43,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import SQLDatabase" + "from langchain_community.utilities import SQLDatabase" ] }, { diff --git a/docs/docs/guides/evaluation/examples/comparisons.ipynb b/docs/docs/guides/evaluation/examples/comparisons.ipynb index eb12a8773d9..b49776d971b 100644 --- a/docs/docs/guides/evaluation/examples/comparisons.ipynb +++ b/docs/docs/guides/evaluation/examples/comparisons.ipynb @@ -99,8 +99,8 @@ "outputs": [], "source": [ "from langchain.agents import AgentType, Tool, initialize_agent\n", - "from langchain.utilities import SerpAPIWrapper\n", "from langchain_community.chat_models import ChatOpenAI\n", + "from langchain_community.utilities import SerpAPIWrapper\n", "\n", "# Initialize the language model\n", "# You can add your own OpenAI API key by adding openai_api_key=\"\"\n", diff --git a/docs/docs/guides/model_laboratory.ipynb b/docs/docs/guides/model_laboratory.ipynb index f82cc207315..538d4942814 100644 --- a/docs/docs/guides/model_laboratory.ipynb +++ b/docs/docs/guides/model_laboratory.ipynb @@ -141,7 +141,7 @@ "outputs": [], "source": [ "from langchain.chains import SelfAskWithSearchChain\n", - "from langchain.utilities import SerpAPIWrapper\n", + "from langchain_community.utilities import SerpAPIWrapper\n", "\n", "open_ai_llm = OpenAI(temperature=0)\n", "search = SerpAPIWrapper()\n", diff --git a/docs/docs/integrations/chat/huggingface.ipynb b/docs/docs/integrations/chat/huggingface.ipynb index efc5cdfcb6c..2a9b236808f 100644 --- a/docs/docs/integrations/chat/huggingface.ipynb +++ 
b/docs/docs/integrations/chat/huggingface.ipynb @@ -309,7 +309,7 @@ " ReActJsonSingleInputOutputParser,\n", ")\n", "from langchain.tools.render import render_text_description\n", - "from langchain.utilities import SerpAPIWrapper" + "from langchain_community.utilities import SerpAPIWrapper" ] }, { diff --git a/docs/docs/integrations/llms/bittensor.ipynb b/docs/docs/integrations/llms/bittensor.ipynb index 200a0af3e54..191a84df7ca 100644 --- a/docs/docs/integrations/llms/bittensor.ipynb +++ b/docs/docs/integrations/llms/bittensor.ipynb @@ -119,7 +119,7 @@ "outputs": [], "source": [ "from langchain.tools import Tool\n", - "from langchain.utilities import GoogleSearchAPIWrapper\n", + "from langchain_community.utilities import GoogleSearchAPIWrapper\n", "\n", "search = GoogleSearchAPIWrapper()\n", "\n", diff --git a/docs/docs/integrations/llms/opaqueprompts.ipynb b/docs/docs/integrations/llms/opaqueprompts.ipynb index 936b5d339ea..1ac440c0565 100644 --- a/docs/docs/integrations/llms/opaqueprompts.ipynb +++ b/docs/docs/integrations/llms/opaqueprompts.ipynb @@ -173,7 +173,7 @@ "metadata": {}, "outputs": [], "source": [ - "import langchain.utilities.opaqueprompts as op\n", + "import langchain_community.utilities.opaqueprompts as op\n", "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.runnables import RunnablePassthrough\n", "\n", diff --git a/docs/docs/integrations/memory/zep_memory.ipynb b/docs/docs/integrations/memory/zep_memory.ipynb index 08da5ef96cf..9be30d69364 100644 --- a/docs/docs/integrations/memory/zep_memory.ipynb +++ b/docs/docs/integrations/memory/zep_memory.ipynb @@ -53,8 +53,8 @@ "from langchain.memory import ZepMemory\n", "from langchain.retrievers import ZepRetriever\n", "from langchain.schema import AIMessage, HumanMessage\n", - "from langchain.utilities import WikipediaAPIWrapper\n", "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import WikipediaAPIWrapper\n", "\n", "# Set this to 
your Zep server URL\n", "ZEP_API_URL = \"http://localhost:8000\"\n", diff --git a/docs/docs/integrations/platforms/google.mdx b/docs/docs/integrations/platforms/google.mdx index c61f0001e0e..bc271d5f12f 100644 --- a/docs/docs/integrations/platforms/google.mdx +++ b/docs/docs/integrations/platforms/google.mdx @@ -333,7 +333,7 @@ pip install google-api-python-client google-auth-httplib2 google-auth-oauthlib See a [usage example and authorization instructions](/docs/integrations/tools/google_drive). ```python -from langchain.utilities.google_drive import GoogleDriveAPIWrapper +from langchain_community.utilities.google_drive import GoogleDriveAPIWrapper from langchain_community.tools.google_drive.tool import GoogleDriveSearchTool ``` @@ -358,7 +358,7 @@ from langchain.tools import GooglePlacesTool `GOOGLE_API_KEY` and `GOOGLE_CSE_ID` respectively. ```python -from langchain.utilities import GoogleSearchAPIWrapper +from langchain_community.utilities import GoogleSearchAPIWrapper ``` For a more detailed walkthrough of this wrapper, see [this notebook](/docs/integrations/tools/google_search). 
@@ -382,7 +382,7 @@ See a [usage example and authorization instructions](/docs/integrations/tools/go ```python from langchain_community.tools.google_finance import GoogleFinanceQueryRun -from langchain.utilities.google_finance import GoogleFinanceAPIWrapper +from langchain_community.utilities.google_finance import GoogleFinanceAPIWrapper ``` ### Google Jobs @@ -397,7 +397,7 @@ See a [usage example and authorization instructions](/docs/integrations/tools/go ```python from langchain_community.tools.google_jobs import GoogleJobsQueryRun -from langchain.utilities.google_finance import GoogleFinanceAPIWrapper +from langchain_community.utilities.google_jobs import GoogleJobsAPIWrapper ``` ### Google Lens @@ -406,7 +406,7 @@ See a [usage example and authorization instructions](/docs/integrations/tools/go ```python from langchain_community.tools.google_lens import GoogleLensQueryRun -from langchain.utilities.google_lens import GoogleLensAPIWrapper +from langchain_community.utilities.google_lens import GoogleLensAPIWrapper ``` ### Google Scholar @@ -421,7 +421,7 @@ See a [usage example and authorization instructions](/docs/integrations/tools/go ```python from langchain_community.tools.google_scholar import GoogleScholarQueryRun -from langchain.utilities.google_scholar import GoogleScholarAPIWrapper +from langchain_community.utilities.google_scholar import GoogleScholarAPIWrapper ``` ### Google Trends @@ -436,7 +436,7 @@ See a [usage example and authorization instructions](/docs/integrations/tools/go ```python from langchain_community.tools.google_trends import GoogleTrendsQueryRun -from langchain.utilities.google_trends import GoogleTrendsAPIWrapper +from langchain_community.utilities.google_trends import GoogleTrendsAPIWrapper ``` @@ -536,7 +536,7 @@ from langchain_community.chat_loaders.gmail import GMailLoader See [usage examples and authorization instructions](/docs/integrations/tools/searchapi). 
```python -from langchain.utilities import SearchApiAPIWrapper +from langchain_community.utilities import SearchApiAPIWrapper ``` ### SerpAPI @@ -546,7 +546,7 @@ from langchain.utilities import SearchApiAPIWrapper See a [usage example and authorization instructions](/docs/integrations/tools/serpapi). ```python -from langchain.utilities import SerpAPIWrapper +from langchain_community.utilities import SerpAPIWrapper ``` ### Serper.dev @@ -554,7 +554,7 @@ from langchain.utilities import SerpAPIWrapper See a [usage example and authorization instructions](/docs/integrations/tools/google_serper). ```python -from langchain.utilities import GoogleSerperAPIWrapper +from langchain_community.utilities import GoogleSerperAPIWrapper ``` ### YouTube diff --git a/docs/docs/integrations/platforms/microsoft.mdx b/docs/docs/integrations/platforms/microsoft.mdx index d7c408c4a6b..e17d3e303a4 100644 --- a/docs/docs/integrations/platforms/microsoft.mdx +++ b/docs/docs/integrations/platforms/microsoft.mdx @@ -272,7 +272,7 @@ from langchain.retrievers import AzureCognitiveSearchRetriever See a [usage example](/docs/integrations/tools/bing_search). ```python -from langchain.utilities import BingSearchAPIWrapper +from langchain_community.utilities import BingSearchAPIWrapper ``` ## Toolkits @@ -317,7 +317,7 @@ See a [usage example](/docs/integrations/toolkits/powerbi). ```python from langchain_community.agent_toolkits import PowerBIToolkit -from langchain.utilities.powerbi import PowerBIDataset +from langchain_community.utilities.powerbi import PowerBIDataset ``` ## More diff --git a/docs/docs/integrations/platforms/openai.mdx b/docs/docs/integrations/platforms/openai.mdx index a8fae1e126b..e74bb6dc80c 100644 --- a/docs/docs/integrations/platforms/openai.mdx +++ b/docs/docs/integrations/platforms/openai.mdx @@ -119,5 +119,5 @@ from langchain.adapters import openai as lc_openai See a [usage example](/docs/integrations/tools/dalle_image_generator). 
```python -from langchain.utilities.dalle_image_generator import DallEAPIWrapper +from langchain_community.utilities.dalle_image_generator import DallEAPIWrapper ``` diff --git a/docs/docs/integrations/providers/apify.mdx b/docs/docs/integrations/providers/apify.mdx index 11b684c0da2..a3662589cb8 100644 --- a/docs/docs/integrations/providers/apify.mdx +++ b/docs/docs/integrations/providers/apify.mdx @@ -29,7 +29,7 @@ blogs, or knowledge bases. You can use the `ApifyWrapper` to run Actors on the Apify platform. ```python -from langchain.utilities import ApifyWrapper +from langchain_community.utilities import ApifyWrapper ``` For a more detailed walkthrough of this wrapper, see [this notebook](/docs/integrations/tools/apify). diff --git a/docs/docs/integrations/providers/cnosdb.mdx b/docs/docs/integrations/providers/cnosdb.mdx index c94facbca77..cbb7dfcb076 100644 --- a/docs/docs/integrations/providers/cnosdb.mdx +++ b/docs/docs/integrations/providers/cnosdb.mdx @@ -31,7 +31,7 @@ Args: ## Examples ```python # Connecting to CnosDB with SQLDatabase Wrapper -from langchain.utilities import SQLDatabase +from langchain_community.utilities import SQLDatabase db = SQLDatabase.from_cnosdb() ``` @@ -45,7 +45,7 @@ llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo") ### SQL Database Chain This example demonstrates the use of the SQL Chain for answering a question over a CnosDB. ```python -from langchain.utilities import SQLDatabaseChain +from langchain_experimental.sql import SQLDatabaseChain db_chain = SQLDatabaseChain.from_llm(llm, db, verbose=True) diff --git a/docs/docs/integrations/providers/dataforseo.mdx b/docs/docs/integrations/providers/dataforseo.mdx index 339daaada2b..3be8ed0f2be 100644 --- a/docs/docs/integrations/providers/dataforseo.mdx +++ b/docs/docs/integrations/providers/dataforseo.mdx @@ -22,7 +22,7 @@ os.environ["DATAFORSEO_PASSWORD"] = "your_password" The DataForSEO utility wraps the API. 
To import this utility, use: ```python -from langchain.utilities.dataforseo_api_search import DataForSeoAPIWrapper +from langchain_community.utilities.dataforseo_api_search import DataForSeoAPIWrapper ``` For a detailed walkthrough of this wrapper, see [this notebook](/docs/integrations/tools/dataforseo). diff --git a/docs/docs/integrations/providers/golden.mdx b/docs/docs/integrations/providers/golden.mdx index 5f1c5c15703..3be3e017ce5 100644 --- a/docs/docs/integrations/providers/golden.mdx +++ b/docs/docs/integrations/providers/golden.mdx @@ -17,7 +17,7 @@ There exists a GoldenQueryAPIWrapper utility which wraps this API. To import this utility: ```python -from langchain.utilities.golden_query import GoldenQueryAPIWrapper +from langchain_community.utilities.golden_query import GoldenQueryAPIWrapper ``` For a more detailed walkthrough of this wrapper, see [this notebook](/docs/integrations/tools/golden_query). diff --git a/docs/docs/integrations/providers/google_serper.mdx b/docs/docs/integrations/providers/google_serper.mdx index f091d95ebe5..ec3e4e83dae 100644 --- a/docs/docs/integrations/providers/google_serper.mdx +++ b/docs/docs/integrations/providers/google_serper.mdx @@ -14,13 +14,13 @@ It is broken into two parts: setup, and then references to the specific Google S There exists a GoogleSerperAPIWrapper utility which wraps this API. 
To import this utility: ```python -from langchain.utilities import GoogleSerperAPIWrapper +from langchain_community.utilities import GoogleSerperAPIWrapper ``` You can use it as part of a Self Ask chain: ```python -from langchain.utilities import GoogleSerperAPIWrapper +from langchain_community.utilities import GoogleSerperAPIWrapper from langchain_community.llms.openai import OpenAI from langchain.agents import initialize_agent, Tool from langchain.agents import AgentType diff --git a/docs/docs/integrations/providers/openweathermap.mdx b/docs/docs/integrations/providers/openweathermap.mdx index b38fe5db264..35344e2a118 100644 --- a/docs/docs/integrations/providers/openweathermap.mdx +++ b/docs/docs/integrations/providers/openweathermap.mdx @@ -26,7 +26,7 @@ pip install pyowm There exists a OpenWeatherMapAPIWrapper utility which wraps this API. To import this utility: ```python -from langchain.utilities.openweathermap import OpenWeatherMapAPIWrapper +from langchain_community.utilities.openweathermap import OpenWeatherMapAPIWrapper ``` For a more detailed walkthrough of this wrapper, see [this notebook](/docs/integrations/tools/openweathermap). 
diff --git a/docs/docs/integrations/providers/portkey/index.md b/docs/docs/integrations/providers/portkey/index.md index bed90b08586..73b4ed49995 100644 --- a/docs/docs/integrations/providers/portkey/index.md +++ b/docs/docs/integrations/providers/portkey/index.md @@ -21,7 +21,7 @@ To start, get your Portkey API key by [signing up here](https://app.portkey.ai/l For OpenAI, a simple integration with logging feature would look like this: ```python from langchain_community.llms import OpenAI -from langchain.utilities import Portkey +from langchain_community.utilities import Portkey # Add the Portkey API Key from your account headers = Portkey.Config( @@ -40,7 +40,7 @@ A common Portkey X Langchain use case is to **trace a chain or an agent** and vi ```python from langchain.agents import AgentType, initialize_agent, load_tools from langchain_community.llms import OpenAI -from langchain.utilities import Portkey +from langchain_community.utilities import Portkey # Add the Portkey API Key from your account headers = Portkey.Config( diff --git a/docs/docs/integrations/providers/portkey/logging_tracing_portkey.ipynb b/docs/docs/integrations/providers/portkey/logging_tracing_portkey.ipynb index 7cb1e83091c..2eb407cb5c0 100644 --- a/docs/docs/integrations/providers/portkey/logging_tracing_portkey.ipynb +++ b/docs/docs/integrations/providers/portkey/logging_tracing_portkey.ipynb @@ -27,8 +27,8 @@ "import os\n", "\n", "from langchain.agents import AgentType, initialize_agent, load_tools\n", - "from langchain.utilities import Portkey\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import Portkey" ] }, { diff --git a/docs/docs/integrations/providers/searchapi.mdx b/docs/docs/integrations/providers/searchapi.mdx index 29d59b49b1a..8370d0eca51 100644 --- a/docs/docs/integrations/providers/searchapi.mdx +++ b/docs/docs/integrations/providers/searchapi.mdx @@ -14,13 +14,13 @@ This page covers 
how to use the [SearchApi](https://www.searchapi.io/) Google Se There is a SearchApiAPIWrapper utility which wraps this API. To import this utility: ```python -from langchain.utilities import SearchApiAPIWrapper +from langchain_community.utilities import SearchApiAPIWrapper ``` You can use it as part of a Self Ask chain: ```python -from langchain.utilities import SearchApiAPIWrapper +from langchain_community.utilities import SearchApiAPIWrapper from langchain_community.llms.openai import OpenAI from langchain.agents import initialize_agent, Tool from langchain.agents import AgentType diff --git a/docs/docs/integrations/providers/searx.mdx b/docs/docs/integrations/providers/searx.mdx index 63a2de64f26..6bc01913d42 100644 --- a/docs/docs/integrations/providers/searx.mdx +++ b/docs/docs/integrations/providers/searx.mdx @@ -40,7 +40,7 @@ To use the wrapper we need to pass the host of the SearxNG instance to the wrapp You can use the wrapper to get results from a SearxNG instance. ```python -from langchain.utilities import SearxSearchWrapper +from langchain_community.utilities import SearxSearchWrapper s = SearxSearchWrapper(searx_host="http://localhost:8888") s.run("what is a large language model?") ``` diff --git a/docs/docs/integrations/providers/serpapi.mdx b/docs/docs/integrations/providers/serpapi.mdx index 0e31f0e6cf6..5bf5ec7fa68 100644 --- a/docs/docs/integrations/providers/serpapi.mdx +++ b/docs/docs/integrations/providers/serpapi.mdx @@ -14,7 +14,7 @@ It is broken into two parts: installation and setup, and then references to the There exists a SerpAPI utility which wraps this API. To import this utility: ```python -from langchain.utilities import SerpAPIWrapper +from langchain_community.utilities import SerpAPIWrapper ``` For a more detailed walkthrough of this wrapper, see [this notebook](/docs/integrations/tools/serpapi). 
diff --git a/docs/docs/integrations/providers/stackexchange.mdx b/docs/docs/integrations/providers/stackexchange.mdx index be31468c14c..666b4337e49 100644 --- a/docs/docs/integrations/providers/stackexchange.mdx +++ b/docs/docs/integrations/providers/stackexchange.mdx @@ -19,7 +19,7 @@ pip install stackapi There exists a StackExchangeAPIWrapper utility which wraps this API. To import this utility: ```python -from langchain.utilities import StackExchangeAPIWrapper +from langchain_community.utilities import StackExchangeAPIWrapper ``` For a more detailed walkthrough of this wrapper, see [this notebook](/docs/integrations/tools/stackexchange). diff --git a/docs/docs/integrations/providers/wolfram_alpha.mdx b/docs/docs/integrations/providers/wolfram_alpha.mdx index 5e25d5e2dbf..24ae6d68f4a 100644 --- a/docs/docs/integrations/providers/wolfram_alpha.mdx +++ b/docs/docs/integrations/providers/wolfram_alpha.mdx @@ -22,7 +22,7 @@ pip install wolframalpha There exists a WolframAlphaAPIWrapper utility which wraps this API. To import this utility: ```python -from langchain.utilities.wolfram_alpha import WolframAlphaAPIWrapper +from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper ``` For a more detailed walkthrough of this wrapper, see [this notebook](/docs/integrations/tools/wolfram_alpha). diff --git a/docs/docs/integrations/retrievers/outline.ipynb b/docs/docs/integrations/retrievers/outline.ipynb index a47267de3cb..e0360571ec8 100644 --- a/docs/docs/integrations/retrievers/outline.ipynb +++ b/docs/docs/integrations/retrievers/outline.ipynb @@ -89,7 +89,7 @@ { "data": { "text/plain": [ - "[Document(page_content='This walkthrough demonstrates how to use an agent optimized for conversation. 
Other agents are often optimized for using tools to figure out the best response, which is not ideal in a conversational setting where you may want the agent to be able to chat with the user as well.\\n\\nIf we compare it to the standard ReAct agent, the main difference is the prompt. We want it to be much more conversational.\\n\\nfrom langchain.agents import AgentType, Tool, initialize_agent\\n\\nfrom langchain_community.llms import OpenAI\\n\\nfrom langchain.memory import ConversationBufferMemory\\n\\nfrom langchain.utilities import SerpAPIWrapper\\n\\nsearch = SerpAPIWrapper() tools = \\\\[ Tool( name=\"Current Search\", func=search.run, description=\"useful for when you need to answer questions about current events or the current state of the world\", ), \\\\]\\n\\n\\\\\\nllm = OpenAI(temperature=0)\\n\\nUsing LCEL\\n\\nWe will first show how to create this agent using LCEL\\n\\nfrom langchain import hub\\n\\nfrom langchain.agents.format_scratchpad import format_log_to_str\\n\\nfrom langchain.agents.output_parsers import ReActSingleInputOutputParser\\n\\nfrom langchain.tools.render import render_text_description\\n\\nprompt = hub.pull(\"hwchase17/react-chat\")\\n\\nprompt = prompt.partial( tools=render_text_description(tools), tool_names=\", \".join(\\\\[[t.name](http://t.name) for t in tools\\\\]), )\\n\\nllm_with_stop = llm.bind(stop=\\\\[\"\\\\nObservation\"\\\\])\\n\\nagent = ( { \"input\": lambda x: x\\\\[\"input\"\\\\], \"agent_scratchpad\": lambda x: format_log_to_str(x\\\\[\"intermediate_steps\"\\\\]), \"chat_history\": lambda x: x\\\\[\"chat_history\"\\\\], } | prompt | llm_with_stop | ReActSingleInputOutputParser() )\\n\\nfrom langchain.agents import AgentExecutor\\n\\nmemory = ConversationBufferMemory(memory_key=\"chat_history\") agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True, memory=memory)\\n\\nagent_executor.invoke({\"input\": \"hi, i am bob\"})\\\\[\"output\"\\\\]\\n\\n```\\n> Entering new AgentExecutor 
chain...\\n\\nThought: Do I need to use a tool? No\\nFinal Answer: Hi Bob, nice to meet you! How can I help you today?\\n\\n> Finished chain.\\n```\\n\\n\\\\\\n\\'Hi Bob, nice to meet you! How can I help you today?\\'\\n\\nagent_executor.invoke({\"input\": \"whats my name?\"})\\\\[\"output\"\\\\]\\n\\n```\\n> Entering new AgentExecutor chain...\\n\\nThought: Do I need to use a tool? No\\nFinal Answer: Your name is Bob.\\n\\n> Finished chain.\\n```\\n\\n\\\\\\n\\'Your name is Bob.\\'\\n\\nagent_executor.invoke({\"input\": \"what are some movies showing 9/21/2023?\"})\\\\[\"output\"\\\\]\\n\\n```\\n> Entering new AgentExecutor chain...\\n\\nThought: Do I need to use a tool? Yes\\nAction: Current Search\\nAction Input: Movies showing 9/21/2023[\\'September 2023 Movies: The Creator • Dumb Money • Expend4bles • The Kill Room • The Inventor • The Equalizer 3 • PAW Patrol: The Mighty Movie, ...\\'] Do I need to use a tool? No\\nFinal Answer: According to current search, some movies showing on 9/21/2023 are The Creator, Dumb Money, Expend4bles, The Kill Room, The Inventor, The Equalizer 3, and PAW Patrol: The Mighty Movie.\\n\\n> Finished chain.\\n```\\n\\n\\\\\\n\\'According to current search, some movies showing on 9/21/2023 are The Creator, Dumb Money, Expend4bles, The Kill Room, The Inventor, The Equalizer 3, and PAW Patrol: The Mighty Movie.\\'\\n\\n\\\\\\nUse the off-the-shelf agent\\n\\nWe can also create this agent using the off-the-shelf agent class\\n\\nagent_executor = initialize_agent( tools, llm, agent=AgentType.CONVERSATIONAL_REACT_DESCRIPTION, verbose=True, memory=memory, )\\n\\nUse a chat model\\n\\nWe can also use a chat model here. 
The main difference here is in the prompts used.\\n\\nfrom langchain import hub\\n\\nfrom langchain_community.chat_models import ChatOpenAI\\n\\nprompt = hub.pull(\"hwchase17/react-chat-json\") chat_model = ChatOpenAI(temperature=0, model=\"gpt-4\")\\n\\nprompt = prompt.partial( tools=render_text_description(tools), tool_names=\", \".join(\\\\[[t.name](http://t.name) for t in tools\\\\]), )\\n\\nchat_model_with_stop = chat_model.bind(stop=\\\\[\"\\\\nObservation\"\\\\])\\n\\nfrom langchain.agents.format_scratchpad import format_log_to_messages\\n\\nfrom langchain.agents.output_parsers import JSONAgentOutputParser\\n\\n# We need some extra steering, or the c', metadata={'title': 'Conversational', 'source': 'https://d01.getoutline.com/doc/conversational-B5dBkUgQ4b'}),\n", + "[Document(page_content='This walkthrough demonstrates how to use an agent optimized for conversation. Other agents are often optimized for using tools to figure out the best response, which is not ideal in a conversational setting where you may want the agent to be able to chat with the user as well.\\n\\nIf we compare it to the standard ReAct agent, the main difference is the prompt. 
We want it to be much more conversational.\\n\\nfrom langchain.agents import AgentType, Tool, initialize_agent\\n\\nfrom langchain_community.llms import OpenAI\\n\\nfrom langchain.memory import ConversationBufferMemory\\n\\nfrom langchain_community.utilities import SerpAPIWrapper\\n\\nsearch = SerpAPIWrapper() tools = \\\\[ Tool( name=\"Current Search\", func=search.run, description=\"useful for when you need to answer questions about current events or the current state of the world\", ), \\\\]\\n\\n\\\\\\nllm = OpenAI(temperature=0)\\n\\nUsing LCEL\\n\\nWe will first show how to create this agent using LCEL\\n\\nfrom langchain import hub\\n\\nfrom langchain.agents.format_scratchpad import format_log_to_str\\n\\nfrom langchain.agents.output_parsers import ReActSingleInputOutputParser\\n\\nfrom langchain.tools.render import render_text_description\\n\\nprompt = hub.pull(\"hwchase17/react-chat\")\\n\\nprompt = prompt.partial( tools=render_text_description(tools), tool_names=\", \".join(\\\\[[t.name](http://t.name) for t in tools\\\\]), )\\n\\nllm_with_stop = llm.bind(stop=\\\\[\"\\\\nObservation\"\\\\])\\n\\nagent = ( { \"input\": lambda x: x\\\\[\"input\"\\\\], \"agent_scratchpad\": lambda x: format_log_to_str(x\\\\[\"intermediate_steps\"\\\\]), \"chat_history\": lambda x: x\\\\[\"chat_history\"\\\\], } | prompt | llm_with_stop | ReActSingleInputOutputParser() )\\n\\nfrom langchain.agents import AgentExecutor\\n\\nmemory = ConversationBufferMemory(memory_key=\"chat_history\") agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True, memory=memory)\\n\\nagent_executor.invoke({\"input\": \"hi, i am bob\"})\\\\[\"output\"\\\\]\\n\\n```\\n> Entering new AgentExecutor chain...\\n\\nThought: Do I need to use a tool? No\\nFinal Answer: Hi Bob, nice to meet you! How can I help you today?\\n\\n> Finished chain.\\n```\\n\\n\\\\\\n\\'Hi Bob, nice to meet you! 
How can I help you today?\\'\\n\\nagent_executor.invoke({\"input\": \"whats my name?\"})\\\\[\"output\"\\\\]\\n\\n```\\n> Entering new AgentExecutor chain...\\n\\nThought: Do I need to use a tool? No\\nFinal Answer: Your name is Bob.\\n\\n> Finished chain.\\n```\\n\\n\\\\\\n\\'Your name is Bob.\\'\\n\\nagent_executor.invoke({\"input\": \"what are some movies showing 9/21/2023?\"})\\\\[\"output\"\\\\]\\n\\n```\\n> Entering new AgentExecutor chain...\\n\\nThought: Do I need to use a tool? Yes\\nAction: Current Search\\nAction Input: Movies showing 9/21/2023[\\'September 2023 Movies: The Creator • Dumb Money • Expend4bles • The Kill Room • The Inventor • The Equalizer 3 • PAW Patrol: The Mighty Movie, ...\\'] Do I need to use a tool? No\\nFinal Answer: According to current search, some movies showing on 9/21/2023 are The Creator, Dumb Money, Expend4bles, The Kill Room, The Inventor, The Equalizer 3, and PAW Patrol: The Mighty Movie.\\n\\n> Finished chain.\\n```\\n\\n\\\\\\n\\'According to current search, some movies showing on 9/21/2023 are The Creator, Dumb Money, Expend4bles, The Kill Room, The Inventor, The Equalizer 3, and PAW Patrol: The Mighty Movie.\\'\\n\\n\\\\\\nUse the off-the-shelf agent\\n\\nWe can also create this agent using the off-the-shelf agent class\\n\\nagent_executor = initialize_agent( tools, llm, agent=AgentType.CONVERSATIONAL_REACT_DESCRIPTION, verbose=True, memory=memory, )\\n\\nUse a chat model\\n\\nWe can also use a chat model here. 
The main difference here is in the prompts used.\\n\\nfrom langchain import hub\\n\\nfrom langchain_community.chat_models import ChatOpenAI\\n\\nprompt = hub.pull(\"hwchase17/react-chat-json\") chat_model = ChatOpenAI(temperature=0, model=\"gpt-4\")\\n\\nprompt = prompt.partial( tools=render_text_description(tools), tool_names=\", \".join(\\\\[[t.name](http://t.name) for t in tools\\\\]), )\\n\\nchat_model_with_stop = chat_model.bind(stop=\\\\[\"\\\\nObservation\"\\\\])\\n\\nfrom langchain.agents.format_scratchpad import format_log_to_messages\\n\\nfrom langchain.agents.output_parsers import JSONAgentOutputParser\\n\\n# We need some extra steering, or the c', metadata={'title': 'Conversational', 'source': 'https://d01.getoutline.com/doc/conversational-B5dBkUgQ4b'}),\n", " Document(page_content='Quickstart\\n\\nIn this quickstart we\\'ll show you how to:\\n\\nGet setup with LangChain, LangSmith and LangServe\\n\\nUse the most basic and common components of LangChain: prompt templates, models, and output parsers\\n\\nUse LangChain Expression Language, the protocol that LangChain is built on and which facilitates component chaining\\n\\nBuild a simple application with LangChain\\n\\nTrace your application with LangSmith\\n\\nServe your application with LangServe\\n\\nThat\\'s a fair amount to cover! Let\\'s dive in.\\n\\nSetup\\n\\nInstallation\\n\\nTo install LangChain run:\\n\\nPip\\n\\nConda\\n\\npip install langchain\\n\\nFor more details, see our Installation guide.\\n\\nEnvironment\\n\\nUsing LangChain will usually require integrations with one or more model providers, data stores, APIs, etc. For this example, we\\'ll use OpenAI\\'s model APIs.\\n\\nFirst we\\'ll need to install their Python package:\\n\\npip install openai\\n\\nAccessing the API requires an API key, which you can get by creating an account and heading here. 
Once we have a key we\\'ll want to set it as an environment variable by running:\\n\\nexport OPENAI_API_KEY=\"...\"\\n\\nIf you\\'d prefer not to set an environment variable you can pass the key in directly via the openai_api_key named parameter when initiating the OpenAI LLM class:\\n\\nfrom langchain_community.chat_models import ChatOpenAI\\n\\nllm = ChatOpenAI(openai_api_key=\"...\")\\n\\nLangSmith\\n\\nMany of the applications you build with LangChain will contain multiple steps with multiple invocations of LLM calls. As these applications get more and more complex, it becomes crucial to be able to inspect what exactly is going on inside your chain or agent. The best way to do this is with LangSmith.\\n\\nNote that LangSmith is not needed, but it is helpful. If you do want to use LangSmith, after you sign up at the link above, make sure to set your environment variables to start logging traces:\\n\\nexport LANGCHAIN_TRACING_V2=\"true\" export LANGCHAIN_API_KEY=...\\n\\nLangServe\\n\\nLangServe helps developers deploy LangChain chains as a REST API. You do not need to use LangServe to use LangChain, but in this guide we\\'ll show how you can deploy your app with LangServe.\\n\\nInstall with:\\n\\npip install \"langserve\\\\[all\\\\]\"\\n\\nBuilding with LangChain\\n\\nLangChain provides many modules that can be used to build language model applications. Modules can be used as standalones in simple applications and they can be composed for more complex use cases. Composition is powered by LangChain Expression Language (LCEL), which defines a unified Runnable interface that many modules implement, making it possible to seamlessly chain components.\\n\\nThe simplest and most common chain contains three things:\\n\\nLLM/Chat Model: The language model is the core reasoning engine here. In order to work with LangChain, you need to understand the different types of language models and how to work with them. 
Prompt Template: This provides instructions to the language model. This controls what the language model outputs, so understanding how to construct prompts and different prompting strategies is crucial. Output Parser: These translate the raw response from the language model to a more workable format, making it easy to use the output downstream. In this guide we\\'ll cover those three components individually, and then go over how to combine them. Understanding these concepts will set you up well for being able to use and customize LangChain applications. Most LangChain applications allow you to configure the model and/or the prompt, so knowing how to take advantage of this will be a big enabler.\\n\\nLLM / Chat Model\\n\\nThere are two types of language models:\\n\\nLLM: underlying model takes a string as input and returns a string\\n\\nChatModel: underlying model takes a list of messages as input and returns a message\\n\\nStrings are simple, but what exactly are messages? The base message interface is defined by BaseMessage, which has two required attributes:\\n\\ncontent: The content of the message. Usually a string. role: The entity from which the BaseMessage is coming. LangChain provides several ob', metadata={'title': 'Quick Start', 'source': 'https://d01.getoutline.com/doc/quick-start-jGuGGGOTuL'}),\n", " Document(page_content='This walkthrough showcases using an agent to implement the [ReAct](https://react-lm.github.io/) logic.\\n\\n```javascript\\nfrom langchain.agents import AgentType, initialize_agent, load_tools\\nfrom langchain_community.llms import OpenAI\\n```\\n\\nFirst, let\\'s load the language model we\\'re going to use to control the agent.\\n\\n```javascript\\nllm = OpenAI(temperature=0)\\n```\\n\\nNext, let\\'s load some tools to use. 
Note that the llm-math tool uses an LLM, so we need to pass that in.\\n\\n```javascript\\ntools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\\n```\\n\\n## Using LCEL[\\u200b](https://python.langchain.com/docs/modules/agents/agent_types/react#using-lcel \"Direct link to Using LCEL\")\\n\\nWe will first show how to create the agent using LCEL\\n\\n```javascript\\nfrom langchain import hub\\nfrom langchain.agents.format_scratchpad import format_log_to_str\\nfrom langchain.agents.output_parsers import ReActSingleInputOutputParser\\nfrom langchain.tools.render import render_text_description\\n```\\n\\n```javascript\\nprompt = hub.pull(\"hwchase17/react\")\\nprompt = prompt.partial(\\n tools=render_text_description(tools),\\n tool_names=\", \".join([t.name for t in tools]),\\n)\\n```\\n\\n```javascript\\nllm_with_stop = llm.bind(stop=[\"\\\\nObservation\"])\\n```\\n\\n```javascript\\nagent = (\\n {\\n \"input\": lambda x: x[\"input\"],\\n \"agent_scratchpad\": lambda x: format_log_to_str(x[\"intermediate_steps\"]),\\n }\\n | prompt\\n | llm_with_stop\\n | ReActSingleInputOutputParser()\\n)\\n```\\n\\n```javascript\\nfrom langchain.agents import AgentExecutor\\n```\\n\\n```javascript\\nagent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)\\n```\\n\\n```javascript\\nagent_executor.invoke(\\n {\\n \"input\": \"Who is Leo DiCaprio\\'s girlfriend? 
What is her current age raised to the 0.43 power?\"\\n }\\n)\\n```\\n\\n```javascript\\n \\n \\n > Entering new AgentExecutor chain...\\n I need to find out who Leo DiCaprio\\'s girlfriend is and then calculate her age raised to the 0.43 power.\\n Action: Search\\n Action Input: \"Leo DiCaprio girlfriend\"model Vittoria Ceretti I need to find out Vittoria Ceretti\\'s age\\n Action: Search\\n Action Input: \"Vittoria Ceretti age\"25 years I need to calculate 25 raised to the 0.43 power\\n Action: Calculator\\n Action Input: 25^0.43Answer: 3.991298452658078 I now know the final answer\\n Final Answer: Leo DiCaprio\\'s girlfriend is Vittoria Ceretti and her current age raised to the 0.43 power is 3.991298452658078.\\n \\n > Finished chain.\\n\\n\\n\\n\\n\\n {\\'input\\': \"Who is Leo DiCaprio\\'s girlfriend? What is her current age raised to the 0.43 power?\",\\n \\'output\\': \"Leo DiCaprio\\'s girlfriend is Vittoria Ceretti and her current age raised to the 0.43 power is 3.991298452658078.\"}\\n```\\n\\n## Using ZeroShotReactAgent[\\u200b](https://python.langchain.com/docs/modules/agents/agent_types/react#using-zeroshotreactagent \"Direct link to Using ZeroShotReactAgent\")\\n\\nWe will now show how to use the agent with an off-the-shelf agent implementation\\n\\n```javascript\\nagent_executor = initialize_agent(\\n tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\\n)\\n```\\n\\n```javascript\\nagent_executor.invoke(\\n {\\n \"input\": \"Who is Leo DiCaprio\\'s girlfriend? 
What is her current age raised to the 0.43 power?\"\\n }\\n)\\n```\\n\\n```javascript\\n \\n \\n > Entering new AgentExecutor chain...\\n I need to find out who Leo DiCaprio\\'s girlfriend is and then calculate her age raised to the 0.43 power.\\n Action: Search\\n Action Input: \"Leo DiCaprio girlfriend\"\\n Observation: model Vittoria Ceretti\\n Thought: I need to find out Vittoria Ceretti\\'s age\\n Action: Search\\n Action Input: \"Vittoria Ceretti age\"\\n Observation: 25 years\\n Thought: I need to calculate 25 raised to the 0.43 power\\n Action: Calculator\\n Action Input: 25^0.43\\n Observation: Answer: 3.991298452658078\\n Thought: I now know the final answer\\n Final Answer: Leo DiCaprio\\'s girlfriend is Vittoria Ceretti and her current age raised to the 0.43 power is 3.991298452658078.\\n \\n > Finished chain.\\n\\n\\n\\n\\n\\n {\\'input\\': \"Who is L', metadata={'title': 'ReAct', 'source': 'https://d01.getoutline.com/doc/react-d6rxRS1MHk'})]" ] diff --git a/docs/docs/integrations/toolkits/clickup.ipynb b/docs/docs/integrations/toolkits/clickup.ipynb index 53e503ca02b..700c5ba42f1 100644 --- a/docs/docs/integrations/toolkits/clickup.ipynb +++ b/docs/docs/integrations/toolkits/clickup.ipynb @@ -22,9 +22,9 @@ "from datetime import datetime\n", "\n", "from langchain.agents import AgentType, initialize_agent\n", - "from langchain.utilities.clickup import ClickupAPIWrapper\n", "from langchain_community.agent_toolkits.clickup.toolkit import ClickupToolkit\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities.clickup import ClickupAPIWrapper" ] }, { @@ -797,7 +797,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/rodrigolentini/repos/langchain-clickup/libs/langchain/langchain/utilities/clickup.py:145: UserWarning: Error encountered while trying to parse : 'NoneType' object is not subscriptable\n", + 
"/Users/rodrigolentini/repos/langchain-clickup/libs/langchain/langchain/utilities/clickup.py:145: UserWarning: Error encountered while trying to parse : 'NoneType' object is not subscriptable\n", " Falling back to returning input data.\n", " warnings.warn(f'Error encountered while trying to parse {dataclass}: {e}\\n Falling back to returning input data.')\n" ] diff --git a/docs/docs/integrations/toolkits/github.ipynb b/docs/docs/integrations/toolkits/github.ipynb index 70720895621..fc0af1a813f 100644 --- a/docs/docs/integrations/toolkits/github.ipynb +++ b/docs/docs/integrations/toolkits/github.ipynb @@ -107,9 +107,9 @@ "import os\n", "\n", "from langchain.agents import AgentType, initialize_agent\n", - "from langchain.utilities.github import GitHubAPIWrapper\n", "from langchain_community.agent_toolkits.github.toolkit import GitHubToolkit\n", - "from langchain_community.chat_models import ChatOpenAI" + "from langchain_community.chat_models import ChatOpenAI\n", + "from langchain_community.utilities.github import GitHubAPIWrapper" ] }, { diff --git a/docs/docs/integrations/toolkits/gitlab.ipynb b/docs/docs/integrations/toolkits/gitlab.ipynb index 3a6d2af29e5..8d8d9afde39 100644 --- a/docs/docs/integrations/toolkits/gitlab.ipynb +++ b/docs/docs/integrations/toolkits/gitlab.ipynb @@ -101,9 +101,9 @@ "import os\n", "\n", "from langchain.agents import AgentType, initialize_agent\n", - "from langchain.utilities.gitlab import GitLabAPIWrapper\n", "from langchain_community.agent_toolkits.gitlab.toolkit import GitLabToolkit\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities.gitlab import GitLabAPIWrapper" ] }, { diff --git a/docs/docs/integrations/toolkits/jira.ipynb b/docs/docs/integrations/toolkits/jira.ipynb index 801c3aa2e5e..dd2ff47aabf 100644 --- a/docs/docs/integrations/toolkits/jira.ipynb +++ b/docs/docs/integrations/toolkits/jira.ipynb @@ -50,9 +50,9 @@ "import os\n", "\n", 
"from langchain.agents import AgentType, initialize_agent\n", - "from langchain.utilities.jira import JiraAPIWrapper\n", "from langchain_community.agent_toolkits.jira.toolkit import JiraToolkit\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities.jira import JiraAPIWrapper" ] }, { diff --git a/docs/docs/integrations/toolkits/nasa.ipynb b/docs/docs/integrations/toolkits/nasa.ipynb index 7b2f7fa35b8..66786a6b300 100644 --- a/docs/docs/integrations/toolkits/nasa.ipynb +++ b/docs/docs/integrations/toolkits/nasa.ipynb @@ -30,9 +30,9 @@ "outputs": [], "source": [ "from langchain.agents import AgentType, initialize_agent\n", - "from langchain.utilities.nasa import NasaAPIWrapper\n", "from langchain_community.agent_toolkits.nasa.toolkit import NasaToolkit\n", "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities.nasa import NasaAPIWrapper\n", "\n", "llm = OpenAI(temperature=0, openai_api_key=\"\")\n", "nasa = NasaAPIWrapper()\n", diff --git a/docs/docs/integrations/toolkits/powerbi.ipynb b/docs/docs/integrations/toolkits/powerbi.ipynb index 985efc60305..fc90997e41d 100644 --- a/docs/docs/integrations/toolkits/powerbi.ipynb +++ b/docs/docs/integrations/toolkits/powerbi.ipynb @@ -38,9 +38,9 @@ "outputs": [], "source": [ "from azure.identity import DefaultAzureCredential\n", - "from langchain.utilities.powerbi import PowerBIDataset\n", "from langchain_community.agent_toolkits import PowerBIToolkit, create_pbi_agent\n", - "from langchain_community.chat_models import ChatOpenAI" + "from langchain_community.chat_models import ChatOpenAI\n", + "from langchain_community.utilities.powerbi import PowerBIDataset" ] }, { diff --git a/docs/docs/integrations/toolkits/spark_sql.ipynb b/docs/docs/integrations/toolkits/spark_sql.ipynb index 496865497a5..f6dfa6daa68 100644 --- a/docs/docs/integrations/toolkits/spark_sql.ipynb +++ 
b/docs/docs/integrations/toolkits/spark_sql.ipynb @@ -25,9 +25,9 @@ "outputs": [], "source": [ "from langchain.agents import create_spark_sql_agent\n", - "from langchain.utilities.spark_sql import SparkSQL\n", "from langchain_community.agent_toolkits import SparkSQLToolkit\n", - "from langchain_community.chat_models import ChatOpenAI" + "from langchain_community.chat_models import ChatOpenAI\n", + "from langchain_community.utilities.spark_sql import SparkSQL" ] }, { diff --git a/docs/docs/integrations/toolkits/steam.ipynb b/docs/docs/integrations/toolkits/steam.ipynb index 72edde402a3..b85c8b10318 100644 --- a/docs/docs/integrations/toolkits/steam.ipynb +++ b/docs/docs/integrations/toolkits/steam.ipynb @@ -75,9 +75,9 @@ "outputs": [], "source": [ "from langchain.agents import AgentType, initialize_agent\n", - "from langchain.utilities.steam import SteamWebAPIWrapper\n", "from langchain_community.agent_toolkits.steam.toolkit import SteamToolkit\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities.steam import SteamWebAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/alpha_vantage.ipynb b/docs/docs/integrations/tools/alpha_vantage.ipynb index 08cd1e7ad99..958ae9e4959 100644 --- a/docs/docs/integrations/tools/alpha_vantage.ipynb +++ b/docs/docs/integrations/tools/alpha_vantage.ipynb @@ -46,7 +46,7 @@ }, "outputs": [], "source": [ - "from langchain.utilities.alpha_vantage import AlphaVantageAPIWrapper" + "from langchain_community.utilities.alpha_vantage import AlphaVantageAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/apify.ipynb b/docs/docs/integrations/tools/apify.ipynb index 3c647c0a258..3e667a687ae 100644 --- a/docs/docs/integrations/tools/apify.ipynb +++ b/docs/docs/integrations/tools/apify.ipynb @@ -41,8 +41,8 @@ "outputs": [], "source": [ "from langchain.indexes import VectorstoreIndexCreator\n", - "from langchain.utilities import ApifyWrapper\n", - 
"from langchain_community.document_loaders.base import Document" + "from langchain_community.document_loaders.base import Document\n", + "from langchain_community.utilities import ApifyWrapper" ] }, { diff --git a/docs/docs/integrations/tools/arxiv.ipynb b/docs/docs/integrations/tools/arxiv.ipynb index e9eb2bd0818..5b57135ce6a 100644 --- a/docs/docs/integrations/tools/arxiv.ipynb +++ b/docs/docs/integrations/tools/arxiv.ipynb @@ -122,7 +122,7 @@ }, "outputs": [], "source": [ - "from langchain.utilities import ArxivAPIWrapper" + "from langchain_community.utilities import ArxivAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/bing_search.ipynb b/docs/docs/integrations/tools/bing_search.ipynb index f3127b30522..c91d5dbab4a 100644 --- a/docs/docs/integrations/tools/bing_search.ipynb +++ b/docs/docs/integrations/tools/bing_search.ipynb @@ -38,7 +38,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import BingSearchAPIWrapper" + "from langchain_community.utilities import BingSearchAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/dalle_image_generator.ipynb b/docs/docs/integrations/tools/dalle_image_generator.ipynb index 3819f310c19..939f6fb461b 100644 --- a/docs/docs/integrations/tools/dalle_image_generator.ipynb +++ b/docs/docs/integrations/tools/dalle_image_generator.ipynb @@ -53,8 +53,8 @@ "source": [ "from langchain.chains import LLMChain\n", "from langchain.prompts import PromptTemplate\n", - "from langchain.utilities.dalle_image_generator import DallEAPIWrapper\n", "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities.dalle_image_generator import DallEAPIWrapper\n", "\n", "llm = OpenAI(temperature=0.9)\n", "prompt = PromptTemplate(\n", diff --git a/docs/docs/integrations/tools/dataforseo.ipynb b/docs/docs/integrations/tools/dataforseo.ipynb index 51ed7468692..a7fd208d6fa 100644 --- a/docs/docs/integrations/tools/dataforseo.ipynb +++ b/docs/docs/integrations/tools/dataforseo.ipynb @@ 
-19,7 +19,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities.dataforseo_api_search import DataForSeoAPIWrapper" + "from langchain_community.utilities.dataforseo_api_search import DataForSeoAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/ddg.ipynb b/docs/docs/integrations/tools/ddg.ipynb index 2d8daefcfa4..9c515464088 100644 --- a/docs/docs/integrations/tools/ddg.ipynb +++ b/docs/docs/integrations/tools/ddg.ipynb @@ -164,7 +164,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import DuckDuckGoSearchAPIWrapper\n", + "from langchain_community.utilities import DuckDuckGoSearchAPIWrapper\n", "\n", "wrapper = DuckDuckGoSearchAPIWrapper(region=\"de-de\", time=\"d\", max_results=2)" ] diff --git a/docs/docs/integrations/tools/golden_query.ipynb b/docs/docs/integrations/tools/golden_query.ipynb index e456434afef..e734b6a1f40 100644 --- a/docs/docs/integrations/tools/golden_query.ipynb +++ b/docs/docs/integrations/tools/golden_query.ipynb @@ -45,7 +45,7 @@ }, "outputs": [], "source": [ - "from langchain.utilities.golden_query import GoldenQueryAPIWrapper" + "from langchain_community.utilities.golden_query import GoldenQueryAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/google_drive.ipynb b/docs/docs/integrations/tools/google_drive.ipynb index ac9d40b4585..7f87b4b8a99 100644 --- a/docs/docs/integrations/tools/google_drive.ipynb +++ b/docs/docs/integrations/tools/google_drive.ipynb @@ -99,8 +99,8 @@ }, "outputs": [], "source": [ - "from langchain.utilities.google_drive import GoogleDriveAPIWrapper\n", "from langchain_community.tools.google_drive.tool import GoogleDriveSearchTool\n", + "from langchain_community.utilities.google_drive import GoogleDriveAPIWrapper\n", "\n", "# By default, search only in the filename.\n", "tool = GoogleDriveSearchTool(\n", diff --git a/docs/docs/integrations/tools/google_finance.ipynb b/docs/docs/integrations/tools/google_finance.ipynb index 047dcd47985..7e3be7f9448 
100644 --- a/docs/docs/integrations/tools/google_finance.ipynb +++ b/docs/docs/integrations/tools/google_finance.ipynb @@ -43,8 +43,8 @@ "source": [ "import os\n", "\n", - "from langchain.utilities.google_finance import GoogleFinanceAPIWrapper\n", "from langchain_community.tools.google_finance import GoogleFinanceQueryRun\n", + "from langchain_community.utilities.google_finance import GoogleFinanceAPIWrapper\n", "\n", "os.environ[\"SERPAPI_API_KEY\"] = \"\"\n", "tool = GoogleFinanceQueryRun(api_wrapper=GoogleFinanceAPIWrapper())" diff --git a/docs/docs/integrations/tools/google_jobs.ipynb b/docs/docs/integrations/tools/google_jobs.ipynb index cefcc7999e9..a64289fcb55 100644 --- a/docs/docs/integrations/tools/google_jobs.ipynb +++ b/docs/docs/integrations/tools/google_jobs.ipynb @@ -70,8 +70,8 @@ "source": [ "import os\n", "\n", - "from langchain.utilities.google_jobs import GoogleJobsAPIWrapper\n", "from langchain_community.tools.google_jobs import GoogleJobsQueryRun\n", + "from langchain_community.utilities.google_jobs import GoogleJobsAPIWrapper\n", "\n", "os.environ[\"SERPAPI_API_KEY\"] = \"[your serpapi key]\"\n", "tool = GoogleJobsQueryRun(api_wrapper=GoogleJobsAPIWrapper())" diff --git a/docs/docs/integrations/tools/google_lens.ipynb b/docs/docs/integrations/tools/google_lens.ipynb index 51deb4a458b..14240fcd380 100644 --- a/docs/docs/integrations/tools/google_lens.ipynb +++ b/docs/docs/integrations/tools/google_lens.ipynb @@ -50,8 +50,8 @@ "source": [ "import os\n", "\n", - "from langchain.utilities.google_lens import GoogleLensAPIWrapper\n", "from langchain_community.tools.google_lens import GoogleLensQueryRun\n", + "from langchain_community.utilities.google_lens import GoogleLensAPIWrapper\n", "\n", "os.environ[\"SERPAPI_API_KEY\"] = \"\"\n", "tool = GoogleLensQueryRun(api_wrapper=GoogleLensAPIWrapper())" diff --git a/docs/docs/integrations/tools/google_scholar.ipynb b/docs/docs/integrations/tools/google_scholar.ipynb index 9892ae5b073..4084fa89697 100644 
--- a/docs/docs/integrations/tools/google_scholar.ipynb +++ b/docs/docs/integrations/tools/google_scholar.ipynb @@ -39,8 +39,8 @@ "source": [ "import os\n", "\n", - "from langchain.utilities.google_scholar import GoogleScholarAPIWrapper\n", - "from langchain_community.tools.google_scholar import GoogleScholarQueryRun" + "from langchain_community.tools.google_scholar import GoogleScholarQueryRun\n", + "from langchain_community.utilities.google_scholar import GoogleScholarAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/google_search.ipynb b/docs/docs/integrations/tools/google_search.ipynb index e94f7036b90..c8bb72cbbfc 100644 --- a/docs/docs/integrations/tools/google_search.ipynb +++ b/docs/docs/integrations/tools/google_search.ipynb @@ -35,7 +35,7 @@ "outputs": [], "source": [ "from langchain.tools import Tool\n", - "from langchain.utilities import GoogleSearchAPIWrapper\n", + "from langchain_community.utilities import GoogleSearchAPIWrapper\n", "\n", "search = GoogleSearchAPIWrapper()\n", "\n", diff --git a/docs/docs/integrations/tools/google_serper.ipynb b/docs/docs/integrations/tools/google_serper.ipynb index f2244504f66..f4760195057 100644 --- a/docs/docs/integrations/tools/google_serper.ipynb +++ b/docs/docs/integrations/tools/google_serper.ipynb @@ -47,7 +47,7 @@ }, "outputs": [], "source": [ - "from langchain.utilities import GoogleSerperAPIWrapper" + "from langchain_community.utilities import GoogleSerperAPIWrapper" ] }, { @@ -159,8 +159,8 @@ ], "source": [ "from langchain.agents import AgentType, Tool, initialize_agent\n", - "from langchain.utilities import GoogleSerperAPIWrapper\n", "from langchain_community.llms.openai import OpenAI\n", + "from langchain_community.utilities import GoogleSerperAPIWrapper\n", "\n", "llm = OpenAI(temperature=0)\n", "search = GoogleSerperAPIWrapper()\n", diff --git a/docs/docs/integrations/tools/google_trends.ipynb b/docs/docs/integrations/tools/google_trends.ipynb index c4a0c708798..56c279d3fe7 100644 --- 
a/docs/docs/integrations/tools/google_trends.ipynb +++ b/docs/docs/integrations/tools/google_trends.ipynb @@ -51,8 +51,8 @@ "source": [ "import os\n", "\n", - "from langchain.utilities.google_trends import GoogleTrendsAPIWrapper\n", "from langchain_community.tools.google_trends import GoogleTrendsQueryRun\n", + "from langchain_community.utilities.google_trends import GoogleTrendsAPIWrapper\n", "\n", "os.environ[\"SERPAPI_API_KEY\"] = \"\"\n", "tool = GoogleTrendsQueryRun(api_wrapper=GoogleTrendsAPIWrapper())" diff --git a/docs/docs/integrations/tools/openweathermap.ipynb b/docs/docs/integrations/tools/openweathermap.ipynb index 48a954a6194..9f81f50cbd9 100644 --- a/docs/docs/integrations/tools/openweathermap.ipynb +++ b/docs/docs/integrations/tools/openweathermap.ipynb @@ -29,7 +29,7 @@ "source": [ "import os\n", "\n", - "from langchain.utilities import OpenWeatherMapAPIWrapper\n", + "from langchain_community.utilities import OpenWeatherMapAPIWrapper\n", "\n", "os.environ[\"OPENWEATHERMAP_API_KEY\"] = \"\"\n", "\n", diff --git a/docs/docs/integrations/tools/reddit_search.ipynb b/docs/docs/integrations/tools/reddit_search.ipynb index 98846738269..5de27433349 100644 --- a/docs/docs/integrations/tools/reddit_search.ipynb +++ b/docs/docs/integrations/tools/reddit_search.ipynb @@ -50,8 +50,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities.reddit_search import RedditSearchAPIWrapper\n", "from langchain_community.tools.reddit_search.tool import RedditSearchRun\n", + "from langchain_community.utilities.reddit_search import RedditSearchAPIWrapper\n", "\n", "search = RedditSearchRun(\n", " api_wrapper=RedditSearchAPIWrapper(\n", @@ -173,9 +173,9 @@ "from langchain.chains import LLMChain\n", "from langchain.memory import ConversationBufferMemory, ReadOnlySharedMemory\n", "from langchain.prompts import PromptTemplate\n", - "from langchain.utilities.reddit_search import RedditSearchAPIWrapper\n", "from langchain_community.chat_models import 
ChatOpenAI\n", "from langchain_community.tools.reddit_search.tool import RedditSearchRun\n", + "from langchain_community.utilities.reddit_search import RedditSearchAPIWrapper\n", "\n", "# Provide keys for Reddit\n", "client_id = \"\"\n", diff --git a/docs/docs/integrations/tools/requests.ipynb b/docs/docs/integrations/tools/requests.ipynb index 564d28d3f64..763bc0e022a 100644 --- a/docs/docs/integrations/tools/requests.ipynb +++ b/docs/docs/integrations/tools/requests.ipynb @@ -87,7 +87,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import TextRequestsWrapper\n", + "from langchain_community.utilities import TextRequestsWrapper\n", "\n", "requests = TextRequestsWrapper()" ] diff --git a/docs/docs/integrations/tools/searchapi.ipynb b/docs/docs/integrations/tools/searchapi.ipynb index ecdfdb3f2fc..e48e3014b4d 100644 --- a/docs/docs/integrations/tools/searchapi.ipynb +++ b/docs/docs/integrations/tools/searchapi.ipynb @@ -29,7 +29,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import SearchApiAPIWrapper" + "from langchain_community.utilities import SearchApiAPIWrapper" ] }, { @@ -125,8 +125,8 @@ ], "source": [ "from langchain.agents import AgentType, Tool, initialize_agent\n", - "from langchain.utilities import SearchApiAPIWrapper\n", "from langchain_community.llms.openai import OpenAI\n", + "from langchain_community.utilities import SearchApiAPIWrapper\n", "\n", "llm = OpenAI(temperature=0)\n", "search = SearchApiAPIWrapper()\n", diff --git a/docs/docs/integrations/tools/searx_search.ipynb b/docs/docs/integrations/tools/searx_search.ipynb index 70246cb8b43..808a95645a4 100644 --- a/docs/docs/integrations/tools/searx_search.ipynb +++ b/docs/docs/integrations/tools/searx_search.ipynb @@ -23,7 +23,7 @@ "source": [ "import pprint\n", "\n", - "from langchain.utilities import SearxSearchWrapper" + "from langchain_community.utilities import SearxSearchWrapper" ] }, { diff --git 
a/docs/docs/integrations/tools/serpapi.ipynb b/docs/docs/integrations/tools/serpapi.ipynb index f394000f4a9..b9e32f1d40d 100644 --- a/docs/docs/integrations/tools/serpapi.ipynb +++ b/docs/docs/integrations/tools/serpapi.ipynb @@ -17,7 +17,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import SerpAPIWrapper" + "from langchain_community.utilities import SerpAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/stackexchange.ipynb b/docs/docs/integrations/tools/stackexchange.ipynb index dbd00f1b64e..2f9dcd11707 100644 --- a/docs/docs/integrations/tools/stackexchange.ipynb +++ b/docs/docs/integrations/tools/stackexchange.ipynb @@ -28,7 +28,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import StackExchangeAPIWrapper" + "from langchain_community.utilities import StackExchangeAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/tavily_search.ipynb b/docs/docs/integrations/tools/tavily_search.ipynb index 0bb8561d2e2..978b91c8378 100644 --- a/docs/docs/integrations/tools/tavily_search.ipynb +++ b/docs/docs/integrations/tools/tavily_search.ipynb @@ -82,9 +82,9 @@ "import os\n", "\n", "from langchain.agents import AgentType, initialize_agent\n", - "from langchain.utilities.tavily_search import TavilySearchAPIWrapper\n", "from langchain_community.chat_models import ChatOpenAI\n", "from langchain_community.tools.tavily_search import TavilySearchResults\n", + "from langchain_community.utilities.tavily_search import TavilySearchAPIWrapper\n", "\n", "# set up API key\n", "os.environ[\"TAVILY_API_KEY\"] = \"...\"\n", diff --git a/docs/docs/integrations/tools/twilio.ipynb b/docs/docs/integrations/tools/twilio.ipynb index 0e0411a13d4..f87bdc4f5ec 100644 --- a/docs/docs/integrations/tools/twilio.ipynb +++ b/docs/docs/integrations/tools/twilio.ipynb @@ -61,7 +61,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities.twilio import TwilioAPIWrapper" + "from langchain_community.utilities.twilio 
import TwilioAPIWrapper" ] }, { @@ -113,7 +113,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities.twilio import TwilioAPIWrapper" + "from langchain_community.utilities.twilio import TwilioAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/wikipedia.ipynb b/docs/docs/integrations/tools/wikipedia.ipynb index ccb84903699..1c8741a86f1 100644 --- a/docs/docs/integrations/tools/wikipedia.ipynb +++ b/docs/docs/integrations/tools/wikipedia.ipynb @@ -34,7 +34,7 @@ "outputs": [], "source": [ "from langchain.tools import WikipediaQueryRun\n", - "from langchain.utilities import WikipediaAPIWrapper" + "from langchain_community.utilities import WikipediaAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/wolfram_alpha.ipynb b/docs/docs/integrations/tools/wolfram_alpha.ipynb index 3f9be534dea..25ff80268c3 100644 --- a/docs/docs/integrations/tools/wolfram_alpha.ipynb +++ b/docs/docs/integrations/tools/wolfram_alpha.ipynb @@ -53,7 +53,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities.wolfram_alpha import WolframAlphaAPIWrapper" + "from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper" ] }, { diff --git a/docs/docs/integrations/tools/zapier.ipynb b/docs/docs/integrations/tools/zapier.ipynb index a2546829100..4629ee2344b 100644 --- a/docs/docs/integrations/tools/zapier.ipynb +++ b/docs/docs/integrations/tools/zapier.ipynb @@ -61,9 +61,9 @@ "outputs": [], "source": [ "from langchain.agents import AgentType, initialize_agent\n", - "from langchain.utilities.zapier import ZapierNLAWrapper\n", "from langchain_community.agent_toolkits import ZapierToolkit\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities.zapier import ZapierNLAWrapper" ] }, { @@ -162,9 +162,9 @@ "source": [ "from langchain.chains import LLMChain, SimpleSequentialChain, TransformChain\n", "from langchain.prompts import PromptTemplate\n", - 
"from langchain.utilities.zapier import ZapierNLAWrapper\n", "from langchain_community.llms import OpenAI\n", - "from langchain_community.tools.zapier.tool import ZapierNLARunAction" + "from langchain_community.tools.zapier.tool import ZapierNLARunAction\n", + "from langchain_community.utilities.zapier import ZapierNLAWrapper" ] }, { diff --git a/docs/docs/modules/memory/agent_with_memory.ipynb b/docs/docs/modules/memory/agent_with_memory.ipynb index 3e0c5c337f8..430a4a8986f 100644 --- a/docs/docs/modules/memory/agent_with_memory.ipynb +++ b/docs/docs/modules/memory/agent_with_memory.ipynb @@ -30,8 +30,8 @@ "from langchain.agents import AgentExecutor, Tool, ZeroShotAgent\n", "from langchain.chains import LLMChain\n", "from langchain.memory import ConversationBufferMemory\n", - "from langchain.utilities import GoogleSearchAPIWrapper\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import GoogleSearchAPIWrapper" ] }, { diff --git a/docs/docs/modules/memory/agent_with_memory_in_db.ipynb b/docs/docs/modules/memory/agent_with_memory_in_db.ipynb index 3ce8f1104ad..054500d6ad2 100644 --- a/docs/docs/modules/memory/agent_with_memory_in_db.ipynb +++ b/docs/docs/modules/memory/agent_with_memory_in_db.ipynb @@ -36,9 +36,9 @@ "from langchain.agents import AgentExecutor, Tool, ZeroShotAgent\n", "from langchain.chains import LLMChain\n", "from langchain.memory import ConversationBufferMemory\n", - "from langchain.utilities import GoogleSearchAPIWrapper\n", "from langchain_community.chat_message_histories import RedisChatMessageHistory\n", - "from langchain_community.llms import OpenAI" + "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import GoogleSearchAPIWrapper" ] }, { diff --git a/docs/docs/use_cases/qa_structured/sql.ipynb b/docs/docs/use_cases/qa_structured/sql.ipynb index 8351b689556..e706933b2da 100644 --- 
a/docs/docs/use_cases/qa_structured/sql.ipynb +++ b/docs/docs/use_cases/qa_structured/sql.ipynb @@ -85,8 +85,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.utilities import SQLDatabase\n", "from langchain_community.llms import OpenAI\n", + "from langchain_community.utilities import SQLDatabase\n", "from langchain_experimental.sql import SQLDatabaseChain\n", "\n", "db = SQLDatabase.from_uri(\"sqlite:///Chinook.db\")\n", @@ -836,9 +836,9 @@ "outputs": [], "source": [ "from langchain.agents import AgentType, create_sql_agent\n", - "from langchain.utilities import SQLDatabase\n", "from langchain_community.agent_toolkits import SQLDatabaseToolkit\n", "from langchain_community.chat_models import ChatOpenAI\n", + "from langchain_community.utilities import SQLDatabase\n", "\n", "db = SQLDatabase.from_uri(\"sqlite:///Chinook.db\")\n", "llm = ChatOpenAI(model_name=\"gpt-4\", temperature=0)\n", @@ -995,9 +995,9 @@ "outputs": [], "source": [ "from langchain.agents import AgentType, create_sql_agent\n", - "from langchain.utilities import SQLDatabase\n", "from langchain_community.agent_toolkits import SQLDatabaseToolkit\n", "from langchain_community.chat_models import ChatOpenAI\n", + "from langchain_community.utilities import SQLDatabase\n", "\n", "# db = SQLDatabase.from_uri(\"sqlite:///Chinook.db\")\n", "llm = ChatOpenAI(model_name=\"gpt-4\", temperature=0)\n", diff --git a/docs/docs/use_cases/web_scraping.ipynb b/docs/docs/use_cases/web_scraping.ipynb index e2116a62ac8..327d4bb71ad 100644 --- a/docs/docs/use_cases/web_scraping.ipynb +++ b/docs/docs/use_cases/web_scraping.ipynb @@ -480,9 +480,9 @@ "outputs": [], "source": [ "from langchain.retrievers.web_research import WebResearchRetriever\n", - "from langchain.utilities import GoogleSearchAPIWrapper\n", "from langchain_community.chat_models.openai import ChatOpenAI\n", "from langchain_community.embeddings import OpenAIEmbeddings\n", + "from langchain_community.utilities import GoogleSearchAPIWrapper\n", 
"from langchain_community.vectorstores import Chroma" ] }, @@ -631,7 +631,7 @@ "source": [ "from langchain.docstore.document import Document\n", "from langchain.indexes import VectorstoreIndexCreator\n", - "from langchain.utilities import ApifyWrapper\n", + "from langchain_community.utilities import ApifyWrapper\n", "\n", "apify = ApifyWrapper()\n", "# Call the Actor to obtain text from the crawled webpages\n", diff --git a/libs/community/tests/integration_tests/llms/test_arcee.py b/libs/community/tests/integration_tests/llms/test_arcee.py index 95797988f01..b3b7620cd0a 100644 --- a/libs/community/tests/integration_tests/llms/test_arcee.py +++ b/libs/community/tests/integration_tests/llms/test_arcee.py @@ -6,7 +6,7 @@ from pytest import CaptureFixture, MonkeyPatch from langchain_community.llms.arcee import Arcee -@patch("langchain.utilities.arcee.requests.get") +@patch("langchain_community.utilities.arcee.requests.get") def test_arcee_api_key_is_secret_string(mock_get: MagicMock) -> None: mock_response = mock_get.return_value mock_response.status_code = 200 @@ -24,7 +24,7 @@ def test_arcee_api_key_is_secret_string(mock_get: MagicMock) -> None: assert isinstance(arcee_without_env_var.arcee_api_key, SecretStr) -@patch("langchain.utilities.arcee.requests.get") +@patch("langchain_community.utilities.arcee.requests.get") def test_api_key_masked_when_passed_via_constructor( mock_get: MagicMock, capsys: CaptureFixture ) -> None: @@ -47,7 +47,7 @@ def test_api_key_masked_when_passed_via_constructor( assert "**********" == captured.out -@patch("langchain.utilities.arcee.requests.get") +@patch("langchain_community.utilities.arcee.requests.get") def test_api_key_masked_when_passed_from_env( mock_get: MagicMock, capsys: CaptureFixture, monkeypatch: MonkeyPatch ) -> None: diff --git a/libs/experimental/langchain_experimental/llm_bash/bash.py b/libs/experimental/langchain_experimental/llm_bash/bash.py index 9c6c4fcf719..5a78f5dc97a 100644 --- 
a/libs/experimental/langchain_experimental/llm_bash/bash.py +++ b/libs/experimental/langchain_experimental/llm_bash/bash.py @@ -23,7 +23,7 @@ class BashProcess: Example: .. code-block:: python - from langchain.utilities.bash import BashProcess + from langchain_community.utilities.bash import BashProcess bash = BashProcess( strip_newlines = False, diff --git a/libs/experimental/langchain_experimental/pal_chain/base.py b/libs/experimental/langchain_experimental/pal_chain/base.py index 71c7a7446d6..d30655ed37a 100644 --- a/libs/experimental/langchain_experimental/pal_chain/base.py +++ b/libs/experimental/langchain_experimental/pal_chain/base.py @@ -13,7 +13,7 @@ from typing import Any, Dict, List, Optional from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain from langchain.chains.llm import LLMChain -from langchain.utilities import PythonREPL +from langchain_community.utilities import PythonREPL from langchain_core.language_models import BaseLanguageModel from langchain_experimental.pal_chain.colored_object_prompt import COLORED_OBJECT_PROMPT diff --git a/libs/experimental/langchain_experimental/sql/base.py b/libs/experimental/langchain_experimental/sql/base.py index 00e89d4e579..1785598e36a 100644 --- a/libs/experimental/langchain_experimental/sql/base.py +++ b/libs/experimental/langchain_experimental/sql/base.py @@ -10,8 +10,8 @@ from langchain.chains.llm import LLMChain from langchain.chains.sql_database.prompt import DECIDER_PROMPT, PROMPT, SQL_PROMPTS from langchain.prompts.prompt import PromptTemplate from langchain.schema import BasePromptTemplate -from langchain.utilities.sql_database import SQLDatabase from langchain_community.tools.sql_database.prompt import QUERY_CHECKER +from langchain_community.utilities.sql_database import SQLDatabase from langchain_core.language_models import BaseLanguageModel from langchain_experimental.pydantic_v1 import Extra, Field, root_validator diff --git 
a/libs/experimental/langchain_experimental/sql/vector_sql.py b/libs/experimental/langchain_experimental/sql/vector_sql.py index 5bd36e2c41e..1b0db959044 100644 --- a/libs/experimental/langchain_experimental/sql/vector_sql.py +++ b/libs/experimental/langchain_experimental/sql/vector_sql.py @@ -8,8 +8,8 @@ from langchain.chains.llm import LLMChain from langchain.chains.sql_database.prompt import PROMPT, SQL_PROMPTS from langchain.prompts.prompt import PromptTemplate from langchain.schema import BaseOutputParser, BasePromptTemplate -from langchain.utilities.sql_database import SQLDatabase from langchain_community.tools.sql_database.prompt import QUERY_CHECKER +from langchain_community.utilities.sql_database import SQLDatabase from langchain_core.embeddings import Embeddings from langchain_core.language_models import BaseLanguageModel diff --git a/libs/experimental/tests/integration_tests/chains/test_sql_database.py b/libs/experimental/tests/integration_tests/chains/test_sql_database.py index 54783d2a638..8dbbee3f30d 100644 --- a/libs/experimental/tests/integration_tests/chains/test_sql_database.py +++ b/libs/experimental/tests/integration_tests/chains/test_sql_database.py @@ -1,6 +1,6 @@ """Test SQL Database Chain.""" -from langchain.utilities.sql_database import SQLDatabase from langchain_community.llms.openai import OpenAI +from langchain_community.utilities.sql_database import SQLDatabase from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, insert from langchain_experimental.sql.base import ( diff --git a/libs/langchain/langchain/__init__.py b/libs/langchain/langchain/__init__.py index 6843ba3cb5e..1b664bbe2e7 100644 --- a/libs/langchain/langchain/__init__.py +++ b/libs/langchain/langchain/__init__.py @@ -259,57 +259,73 @@ def __getattr__(name: str) -> Any: return BasePromptTemplate elif name == "ArxivAPIWrapper": - from langchain.utilities import ArxivAPIWrapper + from langchain_community.utilities import ArxivAPIWrapper - 
_warn_on_import(name, replacement="langchain.utilities.ArxivAPIWrapper") + _warn_on_import( + name, replacement="langchain_community.utilities.ArxivAPIWrapper" + ) return ArxivAPIWrapper elif name == "GoldenQueryAPIWrapper": - from langchain.utilities import GoldenQueryAPIWrapper + from langchain_community.utilities import GoldenQueryAPIWrapper - _warn_on_import(name, replacement="langchain.utilities.GoldenQueryAPIWrapper") + _warn_on_import( + name, replacement="langchain_community.utilities.GoldenQueryAPIWrapper" + ) return GoldenQueryAPIWrapper elif name == "GoogleSearchAPIWrapper": - from langchain.utilities import GoogleSearchAPIWrapper + from langchain_community.utilities import GoogleSearchAPIWrapper - _warn_on_import(name, replacement="langchain.utilities.GoogleSearchAPIWrapper") + _warn_on_import( + name, replacement="langchain_community.utilities.GoogleSearchAPIWrapper" + ) return GoogleSearchAPIWrapper elif name == "GoogleSerperAPIWrapper": - from langchain.utilities import GoogleSerperAPIWrapper + from langchain_community.utilities import GoogleSerperAPIWrapper - _warn_on_import(name, replacement="langchain.utilities.GoogleSerperAPIWrapper") + _warn_on_import( + name, replacement="langchain_community.utilities.GoogleSerperAPIWrapper" + ) return GoogleSerperAPIWrapper elif name == "PowerBIDataset": - from langchain.utilities import PowerBIDataset + from langchain_community.utilities import PowerBIDataset - _warn_on_import(name, replacement="langchain.utilities.PowerBIDataset") + _warn_on_import( + name, replacement="langchain_community.utilities.PowerBIDataset" + ) return PowerBIDataset elif name == "SearxSearchWrapper": - from langchain.utilities import SearxSearchWrapper + from langchain_community.utilities import SearxSearchWrapper - _warn_on_import(name, replacement="langchain.utilities.SearxSearchWrapper") + _warn_on_import( + name, replacement="langchain_community.utilities.SearxSearchWrapper" + ) return SearxSearchWrapper elif name == 
"WikipediaAPIWrapper": - from langchain.utilities import WikipediaAPIWrapper + from langchain_community.utilities import WikipediaAPIWrapper - _warn_on_import(name, replacement="langchain.utilities.WikipediaAPIWrapper") + _warn_on_import( + name, replacement="langchain_community.utilities.WikipediaAPIWrapper" + ) return WikipediaAPIWrapper elif name == "WolframAlphaAPIWrapper": - from langchain.utilities import WolframAlphaAPIWrapper + from langchain_community.utilities import WolframAlphaAPIWrapper - _warn_on_import(name, replacement="langchain.utilities.WolframAlphaAPIWrapper") + _warn_on_import( + name, replacement="langchain_community.utilities.WolframAlphaAPIWrapper" + ) return WolframAlphaAPIWrapper elif name == "SQLDatabase": - from langchain.utilities import SQLDatabase + from langchain_community.utilities import SQLDatabase - _warn_on_import(name, replacement="langchain.utilities.SQLDatabase") + _warn_on_import(name, replacement="langchain_community.utilities.SQLDatabase") return SQLDatabase elif name == "FAISS": @@ -328,9 +344,11 @@ def __getattr__(name: str) -> Any: return ElasticVectorSearch # For backwards compatibility elif name == "SerpAPIChain" or name == "SerpAPIWrapper": - from langchain.utilities import SerpAPIWrapper + from langchain_community.utilities import SerpAPIWrapper - _warn_on_import(name, replacement="langchain.utilities.SerpAPIWrapper") + _warn_on_import( + name, replacement="langchain_community.utilities.SerpAPIWrapper" + ) return SerpAPIWrapper elif name == "verbose": diff --git a/libs/langchain/langchain/agents/load_tools.py b/libs/langchain/langchain/agents/load_tools.py index 07f6dc986d7..192048b9fc1 100644 --- a/libs/langchain/langchain/agents/load_tools.py +++ b/libs/langchain/langchain/agents/load_tools.py @@ -25,8 +25,8 @@ from langchain.callbacks.manager import Callbacks from langchain.chains.api import news_docs, open_meteo_docs, podcast_docs, tmdb_docs from langchain.chains.api.base import APIChain from 
langchain.chains.llm_math.base import LLMMathChain -from langchain.utilities.dalle_image_generator import DallEAPIWrapper -from langchain.utilities.requests import TextRequestsWrapper +from langchain_community.utilities.dalle_image_generator import DallEAPIWrapper +from langchain_community.utilities.requests import TextRequestsWrapper from langchain_community.tools.arxiv.tool import ArxivQueryRun from langchain_community.tools.golden_query.tool import GoldenQueryRun from langchain_community.tools.pubmed.tool import PubmedQueryRun @@ -77,32 +77,32 @@ from langchain_community.tools.dataforseo_api_search import DataForSeoAPISearchR from langchain_community.tools.dataforseo_api_search import DataForSeoAPISearchResults from langchain_community.tools.memorize.tool import Memorize from langchain_community.tools.reddit_search.tool import RedditSearchRun -from langchain.utilities.arxiv import ArxivAPIWrapper -from langchain.utilities.golden_query import GoldenQueryAPIWrapper -from langchain.utilities.pubmed import PubMedAPIWrapper -from langchain.utilities.bing_search import BingSearchAPIWrapper -from langchain.utilities.duckduckgo_search import DuckDuckGoSearchAPIWrapper -from langchain.utilities.google_lens import GoogleLensAPIWrapper -from langchain.utilities.google_jobs import GoogleJobsAPIWrapper -from langchain.utilities.google_search import GoogleSearchAPIWrapper -from langchain.utilities.google_serper import GoogleSerperAPIWrapper -from langchain.utilities.google_scholar import GoogleScholarAPIWrapper -from langchain.utilities.google_finance import GoogleFinanceAPIWrapper -from langchain.utilities.google_trends import GoogleTrendsAPIWrapper -from langchain.utilities.metaphor_search import MetaphorSearchAPIWrapper -from langchain.utilities.awslambda import LambdaWrapper -from langchain.utilities.graphql import GraphQLAPIWrapper -from langchain.utilities.searchapi import SearchApiAPIWrapper -from langchain.utilities.searx_search import SearxSearchWrapper -from 
langchain.utilities.serpapi import SerpAPIWrapper -from langchain.utilities.stackexchange import StackExchangeAPIWrapper -from langchain.utilities.twilio import TwilioAPIWrapper -from langchain.utilities.merriam_webster import MerriamWebsterAPIWrapper -from langchain.utilities.wikipedia import WikipediaAPIWrapper -from langchain.utilities.wolfram_alpha import WolframAlphaAPIWrapper -from langchain.utilities.openweathermap import OpenWeatherMapAPIWrapper -from langchain.utilities.dataforseo_api_search import DataForSeoAPIWrapper -from langchain.utilities.reddit_search import RedditSearchAPIWrapper +from langchain_community.utilities.arxiv import ArxivAPIWrapper +from langchain_community.utilities.golden_query import GoldenQueryAPIWrapper +from langchain_community.utilities.pubmed import PubMedAPIWrapper +from langchain_community.utilities.bing_search import BingSearchAPIWrapper +from langchain_community.utilities.duckduckgo_search import DuckDuckGoSearchAPIWrapper +from langchain_community.utilities.google_lens import GoogleLensAPIWrapper +from langchain_community.utilities.google_jobs import GoogleJobsAPIWrapper +from langchain_community.utilities.google_search import GoogleSearchAPIWrapper +from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper +from langchain_community.utilities.google_scholar import GoogleScholarAPIWrapper +from langchain_community.utilities.google_finance import GoogleFinanceAPIWrapper +from langchain_community.utilities.google_trends import GoogleTrendsAPIWrapper +from langchain_community.utilities.metaphor_search import MetaphorSearchAPIWrapper +from langchain_community.utilities.awslambda import LambdaWrapper +from langchain_community.utilities.graphql import GraphQLAPIWrapper +from langchain_community.utilities.searchapi import SearchApiAPIWrapper +from langchain_community.utilities.searx_search import SearxSearchWrapper +from langchain_community.utilities.serpapi import SerpAPIWrapper +from 
langchain_community.utilities.stackexchange import StackExchangeAPIWrapper +from langchain_community.utilities.twilio import TwilioAPIWrapper +from langchain_community.utilities.merriam_webster import MerriamWebsterAPIWrapper +from langchain_community.utilities.wikipedia import WikipediaAPIWrapper +from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper +from langchain_community.utilities.openweathermap import OpenWeatherMapAPIWrapper +from langchain_community.utilities.dataforseo_api_search import DataForSeoAPIWrapper +from langchain_community.utilities.reddit_search import RedditSearchAPIWrapper def _get_tools_requests_get() -> BaseTool: diff --git a/libs/langchain/langchain/agents/loading.py b/libs/langchain/langchain/agents/loading.py index b3bf5157876..a1691d543aa 100644 --- a/libs/langchain/langchain/agents/loading.py +++ b/libs/langchain/langchain/agents/loading.py @@ -6,12 +6,12 @@ from typing import Any, List, Optional, Union import yaml from langchain_core.language_models import BaseLanguageModel +from langchain_core.utils.loading import try_load_from_hub from langchain.agents.agent import BaseMultiActionAgent, BaseSingleActionAgent from langchain.agents.tools import Tool from langchain.agents.types import AGENT_TO_CLASS from langchain.chains.loading import load_chain, load_chain_from_config -from langchain.utilities.loading import try_load_from_hub logger = logging.getLogger(__file__) diff --git a/libs/langchain/langchain/agents/self_ask_with_search/base.py b/libs/langchain/langchain/agents/self_ask_with_search/base.py index 34604f5f44a..d5aeb0bb7cc 100644 --- a/libs/langchain/langchain/agents/self_ask_with_search/base.py +++ b/libs/langchain/langchain/agents/self_ask_with_search/base.py @@ -1,6 +1,9 @@ """Chain that does self-ask with search.""" from typing import Any, Sequence, Union +from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper +from langchain_community.utilities.searchapi import 
SearchApiAPIWrapper +from langchain_community.utilities.serpapi import SerpAPIWrapper from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field @@ -14,9 +17,6 @@ from langchain.agents.self_ask_with_search.output_parser import SelfAskOutputPar from langchain.agents.self_ask_with_search.prompt import PROMPT from langchain.agents.tools import Tool from langchain.agents.utils import validate_tools_single_input -from langchain.utilities.google_serper import GoogleSerperAPIWrapper -from langchain.utilities.searchapi import SearchApiAPIWrapper -from langchain.utilities.serpapi import SerpAPIWrapper class SelfAskWithSearchAgent(Agent): diff --git a/libs/langchain/langchain/chains/api/base.py b/libs/langchain/langchain/chains/api/base.py index f677cb800da..0afd7bd383c 100644 --- a/libs/langchain/langchain/chains/api/base.py +++ b/libs/langchain/langchain/chains/api/base.py @@ -4,6 +4,7 @@ from __future__ import annotations from typing import Any, Dict, List, Optional, Sequence, Tuple from urllib.parse import urlparse +from langchain_community.utilities.requests import TextRequestsWrapper from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field, root_validator @@ -15,7 +16,6 @@ from langchain.callbacks.manager import ( from langchain.chains.api.prompt import API_RESPONSE_PROMPT, API_URL_PROMPT from langchain.chains.base import Chain from langchain.chains.llm import LLMChain -from langchain.utilities.requests import TextRequestsWrapper def _extract_scheme_and_domain(url: str) -> Tuple[str, str]: diff --git a/libs/langchain/langchain/chains/api/openapi/chain.py b/libs/langchain/langchain/chains/api/openapi/chain.py index c918c2ca5e4..ead27fbf37d 100644 --- a/libs/langchain/langchain/chains/api/openapi/chain.py +++ b/libs/langchain/langchain/chains/api/openapi/chain.py @@ -5,6 
+5,7 @@ import json from typing import Any, Dict, List, NamedTuple, Optional, cast from langchain_community.tools.openapi.utils.api_models import APIOperation +from langchain_community.utilities.requests import Requests from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import BaseModel, Field from requests import Response @@ -14,7 +15,6 @@ from langchain.chains.api.openapi.requests_chain import APIRequesterChain from langchain.chains.api.openapi.response_chain import APIResponderChain from langchain.chains.base import Chain from langchain.chains.llm import LLMChain -from langchain.utilities.requests import Requests class _ParamMapping(NamedTuple): diff --git a/libs/langchain/langchain/chains/llm_requests.py b/libs/langchain/langchain/chains/llm_requests.py index 0dcb2b3cecf..8daf49f4eee 100644 --- a/libs/langchain/langchain/chains/llm_requests.py +++ b/libs/langchain/langchain/chains/llm_requests.py @@ -3,12 +3,12 @@ from __future__ import annotations from typing import Any, Dict, List, Optional +from langchain_community.utilities.requests import TextRequestsWrapper from langchain_core.pydantic_v1 import Extra, Field, root_validator from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains import LLMChain from langchain.chains.base import Chain -from langchain.utilities.requests import TextRequestsWrapper DEFAULT_HEADERS = { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36" # noqa: E501 diff --git a/libs/langchain/langchain/chains/loading.py b/libs/langchain/langchain/chains/loading.py index 64565b61da5..0675b87141a 100644 --- a/libs/langchain/langchain/chains/loading.py +++ b/libs/langchain/langchain/chains/loading.py @@ -10,6 +10,7 @@ from langchain_core.prompts.loading import ( load_prompt, load_prompt_from_config, ) +from langchain_core.utils.loading import try_load_from_hub from langchain.chains import 
ReduceDocumentsChain from langchain.chains.api.base import APIChain @@ -28,7 +29,6 @@ from langchain.chains.qa_with_sources.base import QAWithSourcesChain from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesChain from langchain.chains.qa_with_sources.vector_db import VectorDBQAWithSourcesChain from langchain.chains.retrieval_qa.base import RetrievalQA, VectorDBQA -from langchain.utilities.loading import try_load_from_hub URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/chains/" diff --git a/libs/langchain/langchain/chains/openai_functions/openapi.py b/libs/langchain/langchain/chains/openai_functions/openapi.py index af5c4b32330..549872f25b2 100644 --- a/libs/langchain/langchain/chains/openai_functions/openapi.py +++ b/libs/langchain/langchain/chains/openai_functions/openapi.py @@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Un import requests from langchain_community.chat_models import ChatOpenAI +from langchain_community.utilities.openapi import OpenAPISpec from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate from langchain_core.utils.input import get_colored_text @@ -18,7 +19,6 @@ from langchain.chains.llm import LLMChain from langchain.chains.sequential import SequentialChain from langchain.output_parsers.openai_functions import JsonOutputFunctionsParser from langchain.tools import APIOperation -from langchain.utilities.openapi import OpenAPISpec if TYPE_CHECKING: from openapi_pydantic import Parameter diff --git a/libs/langchain/langchain/chains/sql_database/query.py b/libs/langchain/langchain/chains/sql_database/query.py index a8dd685a436..63a61fc1022 100644 --- a/libs/langchain/langchain/chains/sql_database/query.py +++ b/libs/langchain/langchain/chains/sql_database/query.py @@ -1,12 +1,12 @@ from typing import List, Optional, TypedDict, Union +from 
langchain_community.utilities.sql_database import SQLDatabase from langchain_core.language_models import BaseLanguageModel from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import BasePromptTemplate from langchain_core.runnables import Runnable, RunnableParallel from langchain.chains.sql_database.prompt import PROMPT, SQL_PROMPTS -from langchain.utilities.sql_database import SQLDatabase def _strip(text: str) -> str: diff --git a/libs/langchain/langchain/memory/entity.py b/libs/langchain/langchain/memory/entity.py index db265cc5c20..f2fb62ed965 100644 --- a/libs/langchain/langchain/memory/entity.py +++ b/libs/langchain/langchain/memory/entity.py @@ -3,6 +3,7 @@ from abc import ABC, abstractmethod from itertools import islice from typing import Any, Dict, Iterable, List, Optional +from langchain_community.utilities.redis import get_client from langchain_core.language_models import BaseLanguageModel from langchain_core.messages import BaseMessage, get_buffer_string from langchain_core.prompts import BasePromptTemplate @@ -15,7 +16,6 @@ from langchain.memory.prompt import ( ENTITY_SUMMARIZATION_PROMPT, ) from langchain.memory.utils import get_prompt_input_key -from langchain.utilities.redis import get_client logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/python.py b/libs/langchain/langchain/python.py index 28b9c306968..66685358809 100644 --- a/libs/langchain/langchain/python.py +++ b/libs/langchain/langchain/python.py @@ -1,4 +1,4 @@ """For backwards compatibility.""" -from langchain.utilities.python import PythonREPL +from langchain_community.utilities.python import PythonREPL __all__ = ["PythonREPL"] diff --git a/libs/langchain/langchain/requests.py b/libs/langchain/langchain/requests.py index a59b65a5b65..f848efbf7cd 100644 --- a/libs/langchain/langchain/requests.py +++ b/libs/langchain/langchain/requests.py @@ -1,5 +1,5 @@ """DEPRECATED: Kept for backwards compatibility.""" -from langchain.utilities 
import Requests, RequestsWrapper, TextRequestsWrapper +from langchain_community.utilities import Requests, RequestsWrapper, TextRequestsWrapper __all__ = [ "Requests", diff --git a/libs/langchain/langchain/retrievers/web_research.py b/libs/langchain/langchain/retrievers/web_research.py index 02d31d43a33..92d790cc858 100644 --- a/libs/langchain/langchain/retrievers/web_research.py +++ b/libs/langchain/langchain/retrievers/web_research.py @@ -5,6 +5,7 @@ from typing import List, Optional from langchain_community.document_loaders import AsyncHtmlLoader from langchain_community.document_transformers import Html2TextTransformer from langchain_community.llms import LlamaCpp +from langchain_community.utilities import GoogleSearchAPIWrapper from langchain_core.documents import Document from langchain_core.language_models import BaseLLM from langchain_core.prompts import BasePromptTemplate, PromptTemplate @@ -20,7 +21,6 @@ from langchain.chains import LLMChain from langchain.chains.prompt_selector import ConditionalPromptSelector from langchain.output_parsers.pydantic import PydanticOutputParser from langchain.text_splitter import RecursiveCharacterTextSplitter, TextSplitter -from langchain.utilities import GoogleSearchAPIWrapper logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/serpapi.py b/libs/langchain/langchain/serpapi.py index 460ee8c5ec2..3b3dd24885d 100644 --- a/libs/langchain/langchain/serpapi.py +++ b/libs/langchain/langchain/serpapi.py @@ -1,4 +1,4 @@ """For backwards compatibility.""" -from langchain.utilities.serpapi import SerpAPIWrapper +from langchain_community.utilities.serpapi import SerpAPIWrapper __all__ = ["SerpAPIWrapper"] diff --git a/libs/langchain/langchain/sql_database.py b/libs/langchain/langchain/sql_database.py index 47623c04e77..a1de4ade2aa 100644 --- a/libs/langchain/langchain/sql_database.py +++ b/libs/langchain/langchain/sql_database.py @@ -1,4 +1,4 @@ """Keep here for backwards compatibility.""" -from 
langchain.utilities.sql_database import SQLDatabase +from langchain_community.utilities.sql_database import SQLDatabase __all__ = ["SQLDatabase"] diff --git a/libs/langchain/langchain/tools/openapi/utils/openapi_utils.py b/libs/langchain/langchain/tools/openapi/utils/openapi_utils.py index eb376819263..e958d19d838 100644 --- a/libs/langchain/langchain/tools/openapi/utils/openapi_utils.py +++ b/libs/langchain/langchain/tools/openapi/utils/openapi_utils.py @@ -1,4 +1,4 @@ """Utility functions for parsing an OpenAPI spec. Kept for backwards compat.""" -from langchain.utilities.openapi import HTTPVerb, OpenAPISpec +from langchain_community.utilities.openapi import HTTPVerb, OpenAPISpec __all__ = ["HTTPVerb", "OpenAPISpec"] diff --git a/libs/langchain/langchain/utilities/__init__.py b/libs/langchain/langchain/utilities/__init__.py index 986f99de295..ac91e964d2d 100644 --- a/libs/langchain/langchain/utilities/__init__.py +++ b/libs/langchain/langchain/utilities/__init__.py @@ -5,263 +5,269 @@ and packages. 
""" from typing import Any -from langchain.utilities.requests import Requests, RequestsWrapper, TextRequestsWrapper +from langchain_community.utilities.requests import ( + Requests, + RequestsWrapper, + TextRequestsWrapper, +) def _import_alpha_vantage() -> Any: - from langchain.utilities.alpha_vantage import AlphaVantageAPIWrapper + from langchain_community.utilities.alpha_vantage import AlphaVantageAPIWrapper return AlphaVantageAPIWrapper def _import_apify() -> Any: - from langchain.utilities.apify import ApifyWrapper + from langchain_community.utilities.apify import ApifyWrapper return ApifyWrapper def _import_arcee() -> Any: - from langchain.utilities.arcee import ArceeWrapper + from langchain_community.utilities.arcee import ArceeWrapper return ArceeWrapper def _import_arxiv() -> Any: - from langchain.utilities.arxiv import ArxivAPIWrapper + from langchain_community.utilities.arxiv import ArxivAPIWrapper return ArxivAPIWrapper def _import_awslambda() -> Any: - from langchain.utilities.awslambda import LambdaWrapper + from langchain_community.utilities.awslambda import LambdaWrapper return LambdaWrapper def _import_bibtex() -> Any: - from langchain.utilities.bibtex import BibtexparserWrapper + from langchain_community.utilities.bibtex import BibtexparserWrapper return BibtexparserWrapper def _import_bing_search() -> Any: - from langchain.utilities.bing_search import BingSearchAPIWrapper + from langchain_community.utilities.bing_search import BingSearchAPIWrapper return BingSearchAPIWrapper def _import_brave_search() -> Any: - from langchain.utilities.brave_search import BraveSearchWrapper + from langchain_community.utilities.brave_search import BraveSearchWrapper return BraveSearchWrapper def _import_duckduckgo_search() -> Any: - from langchain.utilities.duckduckgo_search import DuckDuckGoSearchAPIWrapper + from langchain_community.utilities.duckduckgo_search import ( + DuckDuckGoSearchAPIWrapper, + ) return DuckDuckGoSearchAPIWrapper def _import_golden_query() 
-> Any: - from langchain.utilities.golden_query import GoldenQueryAPIWrapper + from langchain_community.utilities.golden_query import GoldenQueryAPIWrapper return GoldenQueryAPIWrapper def _import_google_lens() -> Any: - from langchain.utilities.google_lens import GoogleLensAPIWrapper + from langchain_community.utilities.google_lens import GoogleLensAPIWrapper return GoogleLensAPIWrapper def _import_google_places_api() -> Any: - from langchain.utilities.google_places_api import GooglePlacesAPIWrapper + from langchain_community.utilities.google_places_api import GooglePlacesAPIWrapper return GooglePlacesAPIWrapper def _import_google_jobs() -> Any: - from langchain.utilities.google_jobs import GoogleJobsAPIWrapper + from langchain_community.utilities.google_jobs import GoogleJobsAPIWrapper return GoogleJobsAPIWrapper def _import_google_scholar() -> Any: - from langchain.utilities.google_scholar import GoogleScholarAPIWrapper + from langchain_community.utilities.google_scholar import GoogleScholarAPIWrapper return GoogleScholarAPIWrapper def _import_google_trends() -> Any: - from langchain.utilities.google_trends import GoogleTrendsAPIWrapper + from langchain_community.utilities.google_trends import GoogleTrendsAPIWrapper return GoogleTrendsAPIWrapper def _import_google_finance() -> Any: - from langchain.utilities.google_finance import GoogleFinanceAPIWrapper + from langchain_community.utilities.google_finance import GoogleFinanceAPIWrapper return GoogleFinanceAPIWrapper def _import_google_search() -> Any: - from langchain.utilities.google_search import GoogleSearchAPIWrapper + from langchain_community.utilities.google_search import GoogleSearchAPIWrapper return GoogleSearchAPIWrapper def _import_google_serper() -> Any: - from langchain.utilities.google_serper import GoogleSerperAPIWrapper + from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper return GoogleSerperAPIWrapper def _import_graphql() -> Any: - from langchain.utilities.graphql 
import GraphQLAPIWrapper + from langchain_community.utilities.graphql import GraphQLAPIWrapper return GraphQLAPIWrapper def _import_jira() -> Any: - from langchain.utilities.jira import JiraAPIWrapper + from langchain_community.utilities.jira import JiraAPIWrapper return JiraAPIWrapper def _import_max_compute() -> Any: - from langchain.utilities.max_compute import MaxComputeAPIWrapper + from langchain_community.utilities.max_compute import MaxComputeAPIWrapper return MaxComputeAPIWrapper def _import_merriam_webster() -> Any: - from langchain.utilities.merriam_webster import MerriamWebsterAPIWrapper + from langchain_community.utilities.merriam_webster import MerriamWebsterAPIWrapper return MerriamWebsterAPIWrapper def _import_metaphor_search() -> Any: - from langchain.utilities.metaphor_search import MetaphorSearchAPIWrapper + from langchain_community.utilities.metaphor_search import MetaphorSearchAPIWrapper return MetaphorSearchAPIWrapper def _import_openweathermap() -> Any: - from langchain.utilities.openweathermap import OpenWeatherMapAPIWrapper + from langchain_community.utilities.openweathermap import OpenWeatherMapAPIWrapper return OpenWeatherMapAPIWrapper def _import_outline() -> Any: - from langchain.utilities.outline import OutlineAPIWrapper + from langchain_community.utilities.outline import OutlineAPIWrapper return OutlineAPIWrapper def _import_portkey() -> Any: - from langchain.utilities.portkey import Portkey + from langchain_community.utilities.portkey import Portkey return Portkey def _import_powerbi() -> Any: - from langchain.utilities.powerbi import PowerBIDataset + from langchain_community.utilities.powerbi import PowerBIDataset return PowerBIDataset def _import_pubmed() -> Any: - from langchain.utilities.pubmed import PubMedAPIWrapper + from langchain_community.utilities.pubmed import PubMedAPIWrapper return PubMedAPIWrapper def _import_python() -> Any: - from langchain.utilities.python import PythonREPL + from langchain_community.utilities.python 
import PythonREPL return PythonREPL def _import_scenexplain() -> Any: - from langchain.utilities.scenexplain import SceneXplainAPIWrapper + from langchain_community.utilities.scenexplain import SceneXplainAPIWrapper return SceneXplainAPIWrapper def _import_searchapi() -> Any: - from langchain.utilities.searchapi import SearchApiAPIWrapper + from langchain_community.utilities.searchapi import SearchApiAPIWrapper return SearchApiAPIWrapper def _import_searx_search() -> Any: - from langchain.utilities.searx_search import SearxSearchWrapper + from langchain_community.utilities.searx_search import SearxSearchWrapper return SearxSearchWrapper def _import_serpapi() -> Any: - from langchain.utilities.serpapi import SerpAPIWrapper + from langchain_community.utilities.serpapi import SerpAPIWrapper return SerpAPIWrapper def _import_spark_sql() -> Any: - from langchain.utilities.spark_sql import SparkSQL + from langchain_community.utilities.spark_sql import SparkSQL return SparkSQL def _import_sql_database() -> Any: - from langchain.utilities.sql_database import SQLDatabase + from langchain_community.utilities.sql_database import SQLDatabase return SQLDatabase def _import_steam_webapi() -> Any: - from langchain.utilities.steam import SteamWebAPIWrapper + from langchain_community.utilities.steam import SteamWebAPIWrapper return SteamWebAPIWrapper def _import_stackexchange() -> Any: - from langchain.utilities.stackexchange import StackExchangeAPIWrapper + from langchain_community.utilities.stackexchange import StackExchangeAPIWrapper return StackExchangeAPIWrapper def _import_tensorflow_datasets() -> Any: - from langchain.utilities.tensorflow_datasets import TensorflowDatasets + from langchain_community.utilities.tensorflow_datasets import TensorflowDatasets return TensorflowDatasets def _import_twilio() -> Any: - from langchain.utilities.twilio import TwilioAPIWrapper + from langchain_community.utilities.twilio import TwilioAPIWrapper return TwilioAPIWrapper def 
_import_wikipedia() -> Any: - from langchain.utilities.wikipedia import WikipediaAPIWrapper + from langchain_community.utilities.wikipedia import WikipediaAPIWrapper return WikipediaAPIWrapper def _import_wolfram_alpha() -> Any: - from langchain.utilities.wolfram_alpha import WolframAlphaAPIWrapper + from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper return WolframAlphaAPIWrapper def _import_zapier() -> Any: - from langchain.utilities.zapier import ZapierNLAWrapper + from langchain_community.utilities.zapier import ZapierNLAWrapper return ZapierNLAWrapper def _import_nasa() -> Any: - from langchain.utilities.nasa import NasaAPIWrapper + from langchain_community.utilities.nasa import NasaAPIWrapper return NasaAPIWrapper diff --git a/libs/langchain/scripts/lint_imports.sh b/libs/langchain/scripts/lint_imports.sh index 7c67f8aa8da..c239577ae13 100755 --- a/libs/langchain/scripts/lint_imports.sh +++ b/libs/langchain/scripts/lint_imports.sh @@ -13,7 +13,7 @@ git grep '^from langchain\.' langchain/utils | grep -vE 'from langchain.(pydanti git grep '^from langchain\.' langchain/schema | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|env)' && errors=$((errors+1)) git grep '^from langchain\.' langchain/adapters | grep -vE 'from langchain.(pydantic_v1|utils|schema|load)' && errors=$((errors+1)) git grep '^from langchain\.' langchain/callbacks | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env)' && errors=$((errors+1)) -# TODO: it's probably not amazing so that so many other modules depend on `langchain.utilities`, because there can be a lot of imports there +# TODO: it's probably not amazing so that so many other modules depend on `langchain_community.utilities`, because there can be a lot of imports there git grep '^from langchain\.' langchain/utilities | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|utilities)' && errors=$((errors+1)) git grep '^from langchain\.' 
langchain/storage | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|storage|utilities)' && errors=$((errors+1)) git grep '^from langchain\.' langchain/prompts | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|prompts|_api)' && errors=$((errors+1)) diff --git a/libs/langchain/tests/integration_tests/agent/test_powerbi_agent.py b/libs/langchain/tests/integration_tests/agent/test_powerbi_agent.py index caaf71a3b9f..7a661626fe4 100644 --- a/libs/langchain/tests/integration_tests/agent/test_powerbi_agent.py +++ b/libs/langchain/tests/integration_tests/agent/test_powerbi_agent.py @@ -1,8 +1,8 @@ import pytest from langchain_community.agent_toolkits import PowerBIToolkit, create_pbi_agent from langchain_community.chat_models import ChatOpenAI +from langchain_community.utilities.powerbi import PowerBIDataset -from langchain.utilities.powerbi import PowerBIDataset from langchain.utils import get_from_env diff --git a/libs/langchain/tests/integration_tests/chains/test_self_ask_with_search.py b/libs/langchain/tests/integration_tests/chains/test_self_ask_with_search.py index 126e85d8e44..5cf0f93b8cb 100644 --- a/libs/langchain/tests/integration_tests/chains/test_self_ask_with_search.py +++ b/libs/langchain/tests/integration_tests/chains/test_self_ask_with_search.py @@ -1,8 +1,8 @@ """Integration test for self ask with search.""" from langchain_community.llms.openai import OpenAI +from langchain_community.utilities.searchapi import SearchApiAPIWrapper from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain -from langchain.utilities.searchapi import SearchApiAPIWrapper def test_self_ask_with_search() -> None: diff --git a/libs/langchain/tests/integration_tests/test_dalle.py b/libs/langchain/tests/integration_tests/test_dalle.py index d51cc13ff71..9b765827889 100644 --- a/libs/langchain/tests/integration_tests/test_dalle.py +++ b/libs/langchain/tests/integration_tests/test_dalle.py @@ -1,5 +1,5 @@ """Integration 
test for DallE API Wrapper.""" -from langchain.utilities.dalle_image_generator import DallEAPIWrapper +from langchain_community.utilities.dalle_image_generator import DallEAPIWrapper def test_call() -> None: diff --git a/libs/langchain/tests/unit_tests/agents/test_sql.py b/libs/langchain/tests/unit_tests/agents/test_sql.py index 85612f564a8..617d9bb7b7f 100644 --- a/libs/langchain/tests/unit_tests/agents/test_sql.py +++ b/libs/langchain/tests/unit_tests/agents/test_sql.py @@ -1,7 +1,7 @@ from langchain_community.agent_toolkits import SQLDatabaseToolkit +from langchain_community.utilities.sql_database import SQLDatabase from langchain.agents import create_sql_agent -from langchain.utilities.sql_database import SQLDatabase from tests.unit_tests.llms.fake_llm import FakeLLM diff --git a/libs/langchain/tests/unit_tests/chains/test_api.py b/libs/langchain/tests/unit_tests/chains/test_api.py index 9134453cdfb..449f713d684 100644 --- a/libs/langchain/tests/unit_tests/chains/test_api.py +++ b/libs/langchain/tests/unit_tests/chains/test_api.py @@ -4,11 +4,11 @@ import json from typing import Any import pytest +from langchain_community.utilities.requests import TextRequestsWrapper from langchain.chains.api.base import APIChain from langchain.chains.api.prompt import API_RESPONSE_PROMPT, API_URL_PROMPT from langchain.chains.llm import LLMChain -from langchain.utilities.requests import TextRequestsWrapper from tests.unit_tests.llms.fake_llm import FakeLLM diff --git a/libs/langchain/tests/unit_tests/test_dependencies.py b/libs/langchain/tests/unit_tests/test_dependencies.py index a887187156e..3b0858c02b6 100644 --- a/libs/langchain/tests/unit_tests/test_dependencies.py +++ b/libs/langchain/tests/unit_tests/test_dependencies.py @@ -99,6 +99,10 @@ def test_imports() -> None: from langchain_community.document_loaders import BSHTMLLoader # noqa: F401 from langchain_community.embeddings import OpenAIEmbeddings # noqa: F401 from langchain_community.llms import OpenAI # noqa: F401 + 
from langchain_community.utilities import ( + SearchApiAPIWrapper, # noqa: F401 + SerpAPIWrapper, # noqa: F401 + ) from langchain_community.vectorstores import FAISS # noqa: F401 from langchain_core.prompts import BasePromptTemplate # noqa: F401 @@ -107,7 +111,3 @@ def test_imports() -> None: from langchain.chains import LLMChain # noqa: F401 from langchain.retrievers import VespaRetriever # noqa: F401 from langchain.tools import DuckDuckGoSearchResults # noqa: F401 - from langchain.utilities import ( - SearchApiAPIWrapper, # noqa: F401 - SerpAPIWrapper, # noqa: F401 - ) diff --git a/libs/langchain/tests/unit_tests/test_sql_database.py b/libs/langchain/tests/unit_tests/test_sql_database.py index cd1d049d50d..fea54e74c25 100644 --- a/libs/langchain/tests/unit_tests/test_sql_database.py +++ b/libs/langchain/tests/unit_tests/test_sql_database.py @@ -1,6 +1,7 @@ # flake8: noqa=E501 """Test SQL database wrapper.""" +from langchain_community.utilities.sql_database import SQLDatabase, truncate_word from sqlalchemy import ( Column, Integer, @@ -12,8 +13,6 @@ from sqlalchemy import ( insert, ) -from langchain.utilities.sql_database import SQLDatabase, truncate_word - metadata_obj = MetaData() user = Table( diff --git a/libs/langchain/tests/unit_tests/test_sql_database_schema.py b/libs/langchain/tests/unit_tests/test_sql_database_schema.py index b2a6589463d..22f12ab582a 100644 --- a/libs/langchain/tests/unit_tests/test_sql_database_schema.py +++ b/libs/langchain/tests/unit_tests/test_sql_database_schema.py @@ -18,7 +18,7 @@ from sqlalchemy import ( schema, ) -from langchain.utilities.sql_database import SQLDatabase +from langchain_community.utilities.sql_database import SQLDatabase metadata_obj = MetaData() diff --git a/libs/langchain/tests/unit_tests/utilities/test_imports.py b/libs/langchain/tests/unit_tests/utilities/test_imports.py index 49fddcff655..6499d1b857b 100644 --- a/libs/langchain/tests/unit_tests/utilities/test_imports.py +++ 
b/libs/langchain/tests/unit_tests/utilities/test_imports.py @@ -1,4 +1,4 @@ -from langchain.utilities import __all__ +from langchain_community.utilities import __all__ EXPECTED_ALL = [ "AlphaVantageAPIWrapper", diff --git a/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py b/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py index 092a9433dde..0f0f8c45a89 100644 --- a/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py +++ b/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py @@ -1,7 +1,7 @@ from langchain import hub from langchain.schema import StrOutputParser -from langchain.utilities import WikipediaAPIWrapper from langchain_community.chat_models import ChatAnthropic +from langchain_community.utilities import WikipediaAPIWrapper from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import RunnableLambda, RunnablePassthrough diff --git a/templates/openai-functions-agent/openai_functions_agent/agent.py b/templates/openai-functions-agent/openai_functions_agent/agent.py index 1ecbc715fb4..f6c043fe7e3 100644 --- a/templates/openai-functions-agent/openai_functions_agent/agent.py +++ b/templates/openai-functions-agent/openai_functions_agent/agent.py @@ -4,10 +4,10 @@ from langchain.agents import AgentExecutor from langchain.agents.format_scratchpad import format_to_openai_function_messages from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder -from langchain.utilities.tavily_search import TavilySearchAPIWrapper from langchain_community.chat_models import ChatOpenAI from langchain_community.tools.convert_to_openai import format_tool_to_openai_function from langchain_community.tools.tavily_search import TavilySearchResults +from langchain_community.utilities.tavily_search import TavilySearchAPIWrapper from langchain_core.messages import AIMessage, HumanMessage from langchain_core.pydantic_v1 import BaseModel, Field diff --git 
a/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py b/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py index faa53263d8d..cbaf97fd713 100644 --- a/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py +++ b/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py @@ -11,11 +11,11 @@ from langchain.prompts import ( MessagesPlaceholder, ) from langchain.schema import Document -from langchain.utilities.tavily_search import TavilySearchAPIWrapper from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.tools.convert_to_openai import format_tool_to_openai_function from langchain_community.tools.tavily_search import TavilySearchResults +from langchain_community.utilities.tavily_search import TavilySearchAPIWrapper from langchain_community.vectorstores import FAISS from langchain_core.messages import AIMessage, HumanMessage from langchain_core.pydantic_v1 import BaseModel, Field diff --git a/templates/research-assistant/research_assistant/search/web.py b/templates/research-assistant/research_assistant/search/web.py index abeacbc2f89..3cd4c3d6d74 100644 --- a/templates/research-assistant/research_assistant/search/web.py +++ b/templates/research-assistant/research_assistant/search/web.py @@ -5,8 +5,8 @@ import requests from bs4 import BeautifulSoup from langchain.prompts import ChatPromptTemplate from langchain.retrievers.tavily_search_api import TavilySearchAPIRetriever -from langchain.utilities import DuckDuckGoSearchAPIWrapper from langchain_community.chat_models import ChatOpenAI +from langchain_community.utilities import DuckDuckGoSearchAPIWrapper from langchain_core.messages import SystemMessage from langchain_core.output_parsers import StrOutputParser from langchain_core.runnables import ( diff --git 
a/templates/retrieval-agent/retrieval_agent/chain.py b/templates/retrieval-agent/retrieval_agent/chain.py index c516f953e52..70fa9371a79 100644 --- a/templates/retrieval-agent/retrieval_agent/chain.py +++ b/templates/retrieval-agent/retrieval_agent/chain.py @@ -8,9 +8,9 @@ from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder from langchain.schema import BaseRetriever, Document from langchain.tools.retriever import create_retriever_tool -from langchain.utilities.arxiv import ArxivAPIWrapper from langchain_community.chat_models import AzureChatOpenAI from langchain_community.tools.convert_to_openai import format_tool_to_openai_function +from langchain_community.utilities.arxiv import ArxivAPIWrapper from langchain_core.messages import AIMessage, HumanMessage from langchain_core.pydantic_v1 import BaseModel, Field diff --git a/templates/rewrite-retrieve-read/rewrite_retrieve_read/chain.py b/templates/rewrite-retrieve-read/rewrite_retrieve_read/chain.py index d66bd04e187..90cee77ee82 100644 --- a/templates/rewrite-retrieve-read/rewrite_retrieve_read/chain.py +++ b/templates/rewrite-retrieve-read/rewrite_retrieve_read/chain.py @@ -1,6 +1,6 @@ from langchain.prompts import ChatPromptTemplate -from langchain.utilities import DuckDuckGoSearchAPIWrapper from langchain_community.chat_models import ChatOpenAI +from langchain_community.utilities import DuckDuckGoSearchAPIWrapper from langchain_core.output_parsers import StrOutputParser from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import RunnablePassthrough diff --git a/templates/sql-llama2/sql_llama2/chain.py b/templates/sql-llama2/sql_llama2/chain.py index b47725e8710..8a254520a77 100644 --- a/templates/sql-llama2/sql_llama2/chain.py +++ b/templates/sql-llama2/sql_llama2/chain.py @@ -1,8 +1,8 @@ from pathlib import Path from langchain.prompts import ChatPromptTemplate -from langchain.utilities import 
SQLDatabase from langchain_community.llms import Replicate +from langchain_community.utilities import SQLDatabase from langchain_core.output_parsers import StrOutputParser from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import RunnablePassthrough diff --git a/templates/sql-llamacpp/sql_llamacpp/chain.py b/templates/sql-llamacpp/sql_llamacpp/chain.py index 6ca5232b890..2b651187cc5 100644 --- a/templates/sql-llamacpp/sql_llamacpp/chain.py +++ b/templates/sql-llamacpp/sql_llamacpp/chain.py @@ -5,8 +5,8 @@ from pathlib import Path import requests from langchain.memory import ConversationBufferMemory from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder -from langchain.utilities import SQLDatabase from langchain_community.llms import LlamaCpp +from langchain_community.utilities import SQLDatabase from langchain_core.output_parsers import StrOutputParser from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import RunnableLambda, RunnablePassthrough diff --git a/templates/sql-ollama/sql_ollama/chain.py b/templates/sql-ollama/sql_ollama/chain.py index 84a909cf2b3..0cac4d24014 100644 --- a/templates/sql-ollama/sql_ollama/chain.py +++ b/templates/sql-ollama/sql_ollama/chain.py @@ -2,8 +2,8 @@ from pathlib import Path from langchain.memory import ConversationBufferMemory from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder -from langchain.utilities import SQLDatabase from langchain_community.chat_models import ChatOllama +from langchain_community.utilities import SQLDatabase from langchain_core.output_parsers import StrOutputParser from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import RunnableLambda, RunnablePassthrough diff --git a/templates/sql-research-assistant/sql_research_assistant/search/sql.py b/templates/sql-research-assistant/sql_research_assistant/search/sql.py index d12ee51499a..0da48f25a60 100644 --- 
a/templates/sql-research-assistant/sql_research_assistant/search/sql.py +++ b/templates/sql-research-assistant/sql_research_assistant/search/sql.py @@ -3,8 +3,8 @@ from pathlib import Path from langchain.memory import ConversationBufferMemory from langchain.prompts import ChatPromptTemplate from langchain.pydantic_v1 import BaseModel -from langchain.utilities import SQLDatabase from langchain_community.chat_models import ChatOllama, ChatOpenAI +from langchain_community.utilities import SQLDatabase from langchain_core.output_parsers import StrOutputParser from langchain_core.runnables import RunnablePassthrough diff --git a/templates/sql-research-assistant/sql_research_assistant/search/web.py b/templates/sql-research-assistant/sql_research_assistant/search/web.py index c8618315416..5f83b5934b6 100644 --- a/templates/sql-research-assistant/sql_research_assistant/search/web.py +++ b/templates/sql-research-assistant/sql_research_assistant/search/web.py @@ -4,8 +4,8 @@ from typing import Any import requests from bs4 import BeautifulSoup from langchain.prompts import ChatPromptTemplate -from langchain.utilities import DuckDuckGoSearchAPIWrapper from langchain_community.chat_models import ChatOpenAI +from langchain_community.utilities import DuckDuckGoSearchAPIWrapper from langchain_core.messages import SystemMessage from langchain_core.output_parsers import StrOutputParser from langchain_core.runnables import ( diff --git a/templates/stepback-qa-prompting/stepback_qa_prompting/chain.py b/templates/stepback-qa-prompting/stepback_qa_prompting/chain.py index ef68c405eac..a1fc37b1c72 100644 --- a/templates/stepback-qa-prompting/stepback_qa_prompting/chain.py +++ b/templates/stepback-qa-prompting/stepback_qa_prompting/chain.py @@ -1,6 +1,6 @@ from langchain.prompts import ChatPromptTemplate, FewShotChatMessagePromptTemplate -from langchain.utilities import DuckDuckGoSearchAPIWrapper from langchain_community.chat_models import ChatOpenAI +from langchain_community.utilities 
import DuckDuckGoSearchAPIWrapper from langchain_core.output_parsers import StrOutputParser from langchain_core.runnables import RunnableLambda