Harrison/stop importing from init (#10690)

Author: Harrison Chase
Date: 2023-09-16 17:22:48 -07:00
Committed by: GitHub
Parent: 9749f8ebae
Commit: 5442d2b1fa
202 changed files with 321 additions and 327 deletions
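
The pattern repeated across these files: classes that used to be pulled from the package root (resolved through langchain/__init__.py) are now imported from the submodule that defines them. A minimal sketch of the before/after style; the classes named here are examples drawn from the hunks below, not any single file:

    # Before: everything imported from the package root (langchain/__init__.py)
    # from langchain import PromptTemplate, LLMChain, OpenAI

    # After: each class imported from the submodule that defines it
    from langchain.prompts import PromptTemplate
    from langchain.chains import LLMChain
    from langchain.llms import OpenAI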

View File

@@ -97,7 +97,7 @@
},
"outputs": [],
"source": [
"from langchain import SerpAPIWrapper\n",
"from langchain.utilities import SerpAPIWrapper\n",
"from langchain.agents import initialize_agent, Tool\n",
"from langchain.agents import AgentType\n",
"from langchain.chat_models import ChatOpenAI\n",

View File

@@ -468,7 +468,8 @@
}
],
"source": [
"from langchain import PromptTemplate, LLMChain\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.chains import LLMChain\n",
"from langchain.chains.prompt_selector import ConditionalPromptSelector\n",
"\n",
"DEFAULT_LLAMA_SEARCH_PROMPT = PromptTemplate(\n",
@@ -593,7 +594,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.1"
"version": "3.10.1"
}
},
"nbformat": 4,

View File

@@ -19,7 +19,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain import LLMChain, OpenAI, Cohere, HuggingFaceHub, PromptTemplate\n",
"from langchain.chains import LLMChain\nfrom langchain.llms import OpenAI, Cohere, HuggingFaceHub\nfrom langchain.prompts import PromptTemplate\n",
"from langchain.model_laboratory import ModelLaboratory"
]
},
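
A sketch of what ModelLaboratory is usually for under these imports, assuming OpenAI, Cohere, and Hugging Face Hub API keys are available in the environment; the model choices are illustrative:

    from langchain.llms import OpenAI, Cohere, HuggingFaceHub
    from langchain.model_laboratory import ModelLaboratory

    # Run the same input through several LLMs and print the outputs side by side
    llms = [
        OpenAI(temperature=0),
        Cohere(temperature=0),
        HuggingFaceHub(repo_id="google/flan-t5-xl", model_kwargs={"temperature": 1}),
    ]
    lab = ModelLaboratory.from_llms(llms)
    lab.compare("What color is a flamingo?")
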
@@ -139,7 +139,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain import SelfAskWithSearchChain, SerpAPIWrapper\n",
"from langchain.chains import SelfAskWithSearchChain\nfrom langchain.utilities import SerpAPIWrapper\n",
"\n",
"open_ai_llm = OpenAI(temperature=0)\n",
"search = SerpAPIWrapper()\n",

View File

@@ -95,7 +95,7 @@
},
"outputs": [],
"source": [
"from langchain import PromptTemplate, LLMChain\n",
"from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n",
"from langchain.llms.fake import FakeListLLM\n",
"from langchain_experimental.comprehend_moderation.base_moderation_exceptions import ModerationPiiError\n",
"\n",
@@ -399,7 +399,7 @@
},
"outputs": [],
"source": [
"from langchain import PromptTemplate, LLMChain\n",
"from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n",
"from langchain.llms.fake import FakeListLLM\n",
"\n",
"template = \"\"\"Question: {question}\n",
@@ -564,8 +564,8 @@
},
"outputs": [],
"source": [
"from langchain import HuggingFaceHub\n",
"from langchain import PromptTemplate, LLMChain\n",
"from langchain.llms import HuggingFaceHub\n",
"from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n",
"\n",
"template = \"\"\"Question: {question}\n",
"\n",
@@ -679,7 +679,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain import SagemakerEndpoint\n",
"from langchain.llms import SagemakerEndpoint\n",
"from langchain.llms.sagemaker_endpoint import LLMContentHandler\n",
"from langchain.chains import LLMChain\n",
"from langchain.prompts import load_prompt, PromptTemplate\n",

View File

@@ -123,7 +123,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain import OpenAI\n",
"from langchain.llms import OpenAI\n",
"from langchain.agents import initialize_agent, AgentType"
]
},