mirror of https://github.com/hwchase17/langchain.git
synced 2025-04-27 11:41:51 +00:00

docs, community[patch], experimental[patch], langchain[patch], cli[patch]: import models from community (#15412)

Ran:

```bash
git grep -l 'from langchain\.chat_models' | xargs -L 1 sed -i '' "s/from\ langchain\.chat_models/from\ langchain_community.chat_models/g"
git grep -l 'from langchain\.llms' | xargs -L 1 sed -i '' "s/from\ langchain\.llms/from\ langchain_community.llms/g"
git grep -l 'from langchain\.embeddings' | xargs -L 1 sed -i '' "s/from\ langchain\.embeddings/from\ langchain_community.embeddings/g"
git checkout master libs/langchain/tests/unit_tests/llms
git checkout master libs/langchain/tests/unit_tests/chat_models
git checkout master libs/langchain/tests/unit_tests/embeddings/test_imports.py
make format
cd libs/langchain; make format
cd ../experimental; make format
cd ../core; make format
```

parent 9cbf14dec2
commit 480626dc99
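For illustration, here is a minimal before/after sketch of the import move this commit performs. The module paths come from the diff below; the model name and usage lines are assumptions added for context, not part of the commit.

```python
# Before this commit, docs imported chat models from the langchain meta-package:
# from langchain.chat_models import ChatOpenAI

# After this commit, docs import them from langchain-community directly:
from langchain_community.chat_models import ChatOpenAI

# Hypothetical usage (assumes OPENAI_API_KEY is set in the environment):
llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
print(llm.invoke("Say hello").content)
```

The same mechanical rewrite is applied by the `sed` commands above to `langchain.llms` and `langchain.embeddings` as well.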
@@ -61,13 +61,13 @@
 ],
 "source": [
 "# Local\n",
-"from langchain.chat_models import ChatOllama\n",
+"from langchain_community.chat_models import ChatOllama\n",
 "\n",
 "llama2_chat = ChatOllama(model=\"llama2:13b-chat\")\n",
 "llama2_code = ChatOllama(model=\"codellama:7b-instruct\")\n",
 "\n",
 "# API\n",
-"from langchain.llms import Replicate\n",
+"from langchain_community.llms import Replicate\n",
 "\n",
 "# REPLICATE_API_TOKEN = getpass()\n",
 "# os.environ[\"REPLICATE_API_TOKEN\"] = REPLICATE_API_TOKEN\n",

@@ -198,8 +198,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "\n",
 "\n",

@@ -353,10 +353,10 @@
 "source": [
 "import uuid\n",
 "\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
 "from langchain.vectorstores import Chroma\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.documents import Document\n",
 "\n",
 "\n",

@@ -158,9 +158,9 @@
 }
 ],
 "source": [
-"from langchain.chat_models import ChatVertexAI\n",
-"from langchain.llms import VertexAI\n",
 "from langchain.prompts import PromptTemplate\n",
+"from langchain_community.chat_models import ChatVertexAI\n",
+"from langchain_community.llms import VertexAI\n",
 "from langchain_core.messages import AIMessage\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnableLambda\n",

@@ -342,10 +342,10 @@
 "source": [
 "import uuid\n",
 "\n",
-"from langchain.embeddings import VertexAIEmbeddings\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
 "from langchain.vectorstores import Chroma\n",
+"from langchain_community.embeddings import VertexAIEmbeddings\n",
 "from langchain_core.documents import Document\n",
 "\n",
 "\n",

@@ -235,8 +235,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser"
 ]
 },

@@ -318,10 +318,10 @@
 "source": [
 "import uuid\n",
 "\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
 "from langchain.vectorstores import Chroma\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.documents import Document\n",
 "\n",
 "# The vectorstore to use to index the child chunks\n",

@@ -211,8 +211,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser"
 ]
 },

@@ -373,10 +373,10 @@
 "source": [
 "import uuid\n",
 "\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
 "from langchain.vectorstores import Chroma\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.documents import Document\n",
 "\n",
 "# The vectorstore to use to index the child chunks\n",

@@ -209,8 +209,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOllama\n",
 "from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOllama\n",
 "from langchain_core.output_parsers import StrOutputParser"
 ]
 },

@@ -376,10 +376,10 @@
 "source": [
 "import uuid\n",
 "\n",
-"from langchain.embeddings import GPT4AllEmbeddings\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
 "from langchain.vectorstores import Chroma\n",
+"from langchain_community.embeddings import GPT4AllEmbeddings\n",
 "from langchain_core.documents import Document\n",
 "\n",
 "# The vectorstore to use to index the child chunks\n",

@@ -132,8 +132,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.vectorstores import Chroma\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "\n",
 "baseline = Chroma.from_texts(\n",
 " texts=all_splits_pypdf_texts,\n",

@@ -160,8 +160,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "\n",
 "# Prompt\n",

@@ -28,10 +28,10 @@
 "outputs": [],
 "source": [
 "from langchain.chains import RetrievalQA\n",
-"from langchain.embeddings.openai import OpenAIEmbeddings\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.text_splitter import CharacterTextSplitter\n",
 "from langchain.vectorstores import Chroma\n",
+"from langchain_community.embeddings.openai import OpenAIEmbeddings\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "llm = OpenAI(temperature=0)"
 ]
@@ -161,7 +161,7 @@
 "source": [
 "# Import things that are needed generically\n",
 "from langchain.agents import AgentType, Tool, initialize_agent\n",
-"from langchain.llms import OpenAI"
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -29,7 +29,7 @@
 "outputs": [],
 "source": [
 "from langchain.chains import AnalyzeDocumentChain\n",
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "llm = ChatOpenAI(model=\"gpt-3.5-turbo\", temperature=0)"
 ]

@@ -62,8 +62,8 @@
 "outputs": [],
 "source": [
 "from langchain.docstore import InMemoryDocstore\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
-"from langchain.vectorstores import FAISS"
+"from langchain.vectorstores import FAISS\n",
+"from langchain_community.embeddings import OpenAIEmbeddings"
 ]
 },
 {

@@ -100,7 +100,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_experimental.autonomous_agents import AutoGPT"
 ]
 },

@@ -40,8 +40,8 @@
 "import nest_asyncio\n",
 "import pandas as pd\n",
 "from langchain.agents.agent_toolkits.pandas.base import create_pandas_dataframe_agent\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.docstore.document import Document\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_experimental.autonomous_agents import AutoGPT\n",
 "\n",
 "# Needed since Jupyter runs an async event loop\n",

@@ -311,8 +311,8 @@
 "# Memory\n",
 "import faiss\n",
 "from langchain.docstore import InMemoryDocstore\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.vectorstores import FAISS\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "\n",
 "embeddings_model = OpenAIEmbeddings()\n",
 "embedding_size = 1536\n",

@@ -31,8 +31,8 @@
 "source": [
 "from typing import Optional\n",
 "\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
-"from langchain.llms import OpenAI\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.autonomous_agents import BabyAGI"
 ]
 },

@@ -28,9 +28,9 @@
 "from typing import Optional\n",
 "\n",
 "from langchain.chains import LLMChain\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.prompts import PromptTemplate\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.autonomous_agents import BabyAGI"
 ]
 },

@@ -108,8 +108,8 @@
 "source": [
 "from langchain.agents import AgentExecutor, Tool, ZeroShotAgent\n",
 "from langchain.chains import LLMChain\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.utilities import SerpAPIWrapper\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "todo_prompt = PromptTemplate.from_template(\n",
 " \"You are a planner who is an expert at coming up with a todo list for a given objective. Come up with a todo list for this objective: {objective}\"\n",

@@ -36,7 +36,6 @@
 "source": [
 "from typing import List\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts.chat import (\n",
 " HumanMessagePromptTemplate,\n",
 " SystemMessagePromptTemplate,\n",

@@ -46,7 +45,8 @@
 " BaseMessage,\n",
 " HumanMessage,\n",
 " SystemMessage,\n",
-")"
+")\n",
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -47,7 +47,7 @@
 "outputs": [],
 "source": [
 "from IPython.display import SVG\n",
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.cpal.base import CPALChain\n",
 "from langchain_experimental.pal_chain import PALChain\n",
 "\n",

@@ -657,7 +657,7 @@
 }
 ],
 "source": [
-"from langchain.embeddings.openai import OpenAIEmbeddings\n",
+"from langchain_community.embeddings.openai import OpenAIEmbeddings\n",
 "\n",
 "embeddings = OpenAIEmbeddings()\n",
 "embeddings"

@@ -834,7 +834,7 @@
 "outputs": [],
 "source": [
 "from langchain.chains import ConversationalRetrievalChain\n",
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "model = ChatOpenAI(\n",
 " model_name=\"gpt-3.5-turbo-0613\"\n",

@@ -42,10 +42,10 @@
 ")\n",
 "from langchain.agents.agent_toolkits import NLAToolkit\n",
 "from langchain.chains import LLMChain\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.prompts import StringPromptTemplate\n",
 "from langchain.schema import AgentAction, AgentFinish\n",
-"from langchain.tools.plugin import AIPlugin"
+"from langchain.tools.plugin import AIPlugin\n",
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -114,9 +114,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.schema import Document\n",
-"from langchain.vectorstores import FAISS"
+"from langchain.vectorstores import FAISS\n",
+"from langchain_community.embeddings import OpenAIEmbeddings"
 ]
 },
 {
@@ -67,10 +67,10 @@
 ")\n",
 "from langchain.agents.agent_toolkits import NLAToolkit\n",
 "from langchain.chains import LLMChain\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.prompts import StringPromptTemplate\n",
 "from langchain.schema import AgentAction, AgentFinish\n",
-"from langchain.tools.plugin import AIPlugin"
+"from langchain.tools.plugin import AIPlugin\n",
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -138,9 +138,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.schema import Document\n",
-"from langchain.vectorstores import FAISS"
+"from langchain.vectorstores import FAISS\n",
+"from langchain_community.embeddings import OpenAIEmbeddings"
 ]
 },
 {

@@ -39,10 +39,10 @@
 " Tool,\n",
 ")\n",
 "from langchain.chains import LLMChain\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.prompts import StringPromptTemplate\n",
 "from langchain.schema import AgentAction, AgentFinish\n",
-"from langchain.utilities import SerpAPIWrapper"
+"from langchain.utilities import SerpAPIWrapper\n",
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -103,9 +103,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.schema import Document\n",
-"from langchain.vectorstores import FAISS"
+"from langchain.vectorstores import FAISS\n",
+"from langchain_community.embeddings import OpenAIEmbeddings"
 ]
 },
 {

@@ -93,7 +93,7 @@
 "outputs": [],
 "source": [
 "# Creating an OpenAI Chat LLM wrapper\n",
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "llm = ChatOpenAI(temperature=0, model_name=\"gpt-4\")"
 ]

@@ -52,13 +52,13 @@
 "import os\n",
 "\n",
 "from langchain.chains import RetrievalQA\n",
-"from langchain.embeddings.openai import OpenAIEmbeddings\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.text_splitter import (\n",
 " CharacterTextSplitter,\n",
 " RecursiveCharacterTextSplitter,\n",
 ")\n",
 "from langchain.vectorstores import DeepLake\n",
+"from langchain_community.embeddings.openai import OpenAIEmbeddings\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API Key:\")\n",
 "activeloop_token = getpass.getpass(\"Activeloop Token:\")\n",

@@ -470,12 +470,12 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import (\n",
 " ChatPromptTemplate,\n",
 " HumanMessagePromptTemplate,\n",
 " SystemMessagePromptTemplate,\n",
 ")\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser"
 ]
 },

@@ -545,10 +545,10 @@
 "source": [
 "import uuid\n",
 "\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
 "from langchain.vectorstores.chroma import Chroma\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.documents import Document\n",
 "\n",
 "\n",

@@ -39,7 +39,7 @@
 "source": [
 "from elasticsearch import Elasticsearch\n",
 "from langchain.chains.elasticsearch_database import ElasticsearchDatabaseChain\n",
-"from langchain.chat_models import ChatOpenAI"
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -22,7 +22,7 @@
 "from typing import List, Optional\n",
 "\n",
 "from langchain.chains.openai_tools import create_extraction_chain_pydantic\n",
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.pydantic_v1 import BaseModel"
 ]
 },

@@ -20,7 +20,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.llms.fake import FakeListLLM"
+"from langchain_community.llms.fake import FakeListLLM"
 ]
 },
 {

@@ -73,10 +73,10 @@
 " AsyncCallbackManagerForRetrieverRun,\n",
 " CallbackManagerForRetrieverRun,\n",
 ")\n",
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.schema import BaseRetriever, Document\n",
-"from langchain.utilities import GoogleSerperAPIWrapper"
+"from langchain.utilities import GoogleSerperAPIWrapper\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -47,11 +47,11 @@
 "from datetime import datetime, timedelta\n",
 "from typing import List\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.docstore import InMemoryDocstore\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.retrievers import TimeWeightedVectorStoreRetriever\n",
 "from langchain.vectorstores import FAISS\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from termcolor import colored"
 ]
 },

@@ -75,7 +75,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.autonomous_agents import HuggingGPT\n",
 "\n",
 "# %env OPENAI_API_BASE=http://localhost:8000/v1"
@@ -159,7 +159,7 @@
 "outputs": [],
 "source": [
 "from langchain.agents import AgentType, initialize_agent, load_tools\n",
-"from langchain.llms import OpenAI"
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -20,7 +20,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models.human import HumanInputChatModel"
+"from langchain_community.chat_models.human import HumanInputChatModel"
 ]
 },
 {

@@ -19,7 +19,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.llms.human import HumanInputLLM"
+"from langchain_community.llms.human import HumanInputLLM"
 ]
 },
 {

@@ -21,9 +21,9 @@
 "outputs": [],
 "source": [
 "from langchain.chains import HypotheticalDocumentEmbedder, LLMChain\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
-"from langchain.llms import OpenAI\n",
-"from langchain.prompts import PromptTemplate"
+"from langchain.prompts import PromptTemplate\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -49,7 +49,7 @@
 "source": [
 "# pick and configure the LLM of your choice\n",
 "\n",
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "llm = OpenAI(model=\"gpt-3.5-turbo-instruct\")"
 ]

@@ -43,7 +43,7 @@
 }
 ],
 "source": [
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.llm_bash.base import LLMBashChain\n",
 "\n",
 "llm = OpenAI(temperature=0)\n",

@@ -42,7 +42,7 @@
 ],
 "source": [
 "from langchain.chains import LLMCheckerChain\n",
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "llm = OpenAI(temperature=0.7)\n",
 "\n",

@@ -46,7 +46,7 @@
 ],
 "source": [
 "from langchain.chains import LLMMathChain\n",
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "llm = OpenAI(temperature=0)\n",
 "llm_math = LLMMathChain.from_llm(llm, verbose=True)\n",

@@ -331,7 +331,7 @@
 ],
 "source": [
 "from langchain.chains import LLMSummarizationCheckerChain\n",
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "llm = OpenAI(temperature=0)\n",
 "checker_chain = LLMSummarizationCheckerChain.from_llm(llm, verbose=True, max_checks=2)\n",

@@ -822,7 +822,7 @@
 ],
 "source": [
 "from langchain.chains import LLMSummarizationCheckerChain\n",
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "llm = OpenAI(temperature=0)\n",
 "checker_chain = LLMSummarizationCheckerChain.from_llm(llm, verbose=True, max_checks=3)\n",

@@ -1096,7 +1096,7 @@
 ],
 "source": [
 "from langchain.chains import LLMSummarizationCheckerChain\n",
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "llm = OpenAI(temperature=0)\n",
 "checker_chain = LLMSummarizationCheckerChain.from_llm(llm, max_checks=3, verbose=True)\n",

@@ -14,7 +14,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.llm_symbolic_math.base import LLMSymbolicMathChain\n",
 "\n",
 "llm = OpenAI(temperature=0)\n",

@@ -57,9 +57,9 @@
 "outputs": [],
 "source": [
 "from langchain.chains import LLMChain\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.memory import ConversationBufferWindowMemory\n",
-"from langchain.prompts import PromptTemplate"
+"from langchain.prompts import PromptTemplate\n",
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -91,7 +91,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.messages import HumanMessage, SystemMessage"
 ]
 },

@@ -315,7 +315,7 @@
 "source": [
 "from operator import itemgetter\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.messages import HumanMessage, SystemMessage\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnableLambda, RunnablePassthrough\n",

@@ -43,8 +43,8 @@
 "outputs": [],
 "source": [
 "from langchain.agents import AgentType, initialize_agent\n",
-"from langchain.llms import OpenAI\n",
-"from langchain.tools import SteamshipImageGenerationTool"
+"from langchain.tools import SteamshipImageGenerationTool\n",
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -28,11 +28,11 @@
 "source": [
 "from typing import Callable, List\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.schema import (\n",
 " HumanMessage,\n",
 " SystemMessage,\n",
-")"
+")\n",
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {
@@ -33,7 +33,6 @@
 "from typing import Callable, List\n",
 "\n",
 "import tenacity\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.output_parsers import RegexParser\n",
 "from langchain.prompts import (\n",
 " PromptTemplate,\n",

@@ -41,7 +40,8 @@
 "from langchain.schema import (\n",
 " HumanMessage,\n",
 " SystemMessage,\n",
-")"
+")\n",
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -27,13 +27,13 @@
 "from typing import Callable, List\n",
 "\n",
 "import tenacity\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.output_parsers import RegexParser\n",
 "from langchain.prompts import PromptTemplate\n",
 "from langchain.schema import (\n",
 " HumanMessage,\n",
 " SystemMessage,\n",
-")"
+")\n",
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -31,9 +31,9 @@
 "from os import environ\n",
 "\n",
 "from langchain.chains import LLMChain\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.prompts import PromptTemplate\n",
 "from langchain.utilities import SQLDatabase\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n",
 "from sqlalchemy import MetaData, create_engine\n",
 "\n",

@@ -57,7 +57,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.embeddings import HuggingFaceInstructEmbeddings\n",
+"from langchain_community.embeddings import HuggingFaceInstructEmbeddings\n",
 "from langchain_experimental.sql.vector_sql import VectorSQLOutputParser\n",
 "\n",
 "output_parser = VectorSQLOutputParser.from_embeddings(\n",

@@ -75,8 +75,8 @@
 "outputs": [],
 "source": [
 "from langchain.callbacks import StdOutCallbackHandler\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.utilities.sql_database import SQLDatabase\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.sql.prompt import MYSCALE_PROMPT\n",
 "from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n",
 "\n",

@@ -117,7 +117,7 @@
 "outputs": [],
 "source": [
 "from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesChain\n",
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_experimental.retrievers.vector_sql_database import (\n",
 " VectorSQLDatabaseChainRetriever,\n",
 ")\n",

@@ -21,9 +21,9 @@
 "source": [
 "from langchain.chains import RetrievalQA\n",
 "from langchain.document_loaders import TextLoader\n",
-"from langchain.embeddings.openai import OpenAIEmbeddings\n",
 "from langchain.text_splitter import CharacterTextSplitter\n",
-"from langchain.vectorstores import Chroma"
+"from langchain.vectorstores import Chroma\n",
+"from langchain_community.embeddings.openai import OpenAIEmbeddings"
 ]
 },
 {

@@ -52,8 +52,8 @@
 "source": [
 "from langchain.chains import create_qa_with_sources_chain\n",
 "from langchain.chains.combine_documents.stuff import StuffDocumentsChain\n",
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.prompts import PromptTemplate"
+"from langchain.prompts import PromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -28,7 +28,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.messages import HumanMessage, SystemMessage"
 ]
 },

@@ -414,7 +414,7 @@
 "BREAKING CHANGES:\n",
 "- To use Azure embeddings with OpenAI V1, you'll need to use the new `AzureOpenAIEmbeddings` instead of the existing `OpenAIEmbeddings`. `OpenAIEmbeddings` continue to work when using Azure with `openai<1`.\n",
 "```python\n",
-"from langchain.embeddings import AzureOpenAIEmbeddings\n",
+"from langchain_community.embeddings import AzureOpenAIEmbeddings\n",
 "```\n",
 "\n",
 "\n",
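The BREAKING CHANGES note in the hunk above concerns the embeddings class choice under `openai>=1`, not this commit's path rewrite. A hedged sketch of what the note implies; the class names come from the note itself, while the constructor argument below is an assumption added for illustration:

```python
# Azure with openai>=1: use the dedicated Azure class (post-rewrite path).
from langchain_community.embeddings import AzureOpenAIEmbeddings

# `azure_deployment` is a placeholder value, not taken from the diff.
embeddings = AzureOpenAIEmbeddings(azure_deployment="my-embedding-deployment")

# Azure with openai<1: per the note, the plain OpenAIEmbeddings class still works.
# from langchain_community.embeddings import OpenAIEmbeddings
```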
@@ -47,12 +47,12 @@
 "import inspect\n",
 "\n",
 "import tenacity\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.output_parsers import RegexParser\n",
 "from langchain.schema import (\n",
 " HumanMessage,\n",
 " SystemMessage,\n",
-")"
+")\n",
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -30,9 +30,9 @@
 "outputs": [],
 "source": [
 "from langchain.chains import LLMMathChain\n",
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.utilities import DuckDuckGoSearchAPIWrapper\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_core.tools import Tool\n",
 "from langchain_experimental.plan_and_execute import (\n",
 " PlanAndExecute,\n",

@@ -81,8 +81,8 @@
 "outputs": [],
 "source": [
 "from langchain.chains import ConversationalRetrievalChain\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.retrievers import KayAiRetriever\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "model = ChatOpenAI(model_name=\"gpt-3.5-turbo\")\n",
 "retriever = KayAiRetriever.create(\n",

@@ -17,7 +17,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.pal_chain import PALChain"
 ]
 },
@@ -27,7 +27,7 @@
 ],
 "source": [
 "from langchain.chains import create_citation_fuzzy_match_chain\n",
-"from langchain.chat_models import ChatOpenAI"
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -60,10 +60,10 @@
 "from baidubce.bce_client_configuration import BceClientConfiguration\n",
 "from langchain.chains.retrieval_qa import RetrievalQA\n",
 "from langchain.document_loaders.baiducloud_bos_directory import BaiduBOSDirectoryLoader\n",
-"from langchain.embeddings.huggingface import HuggingFaceEmbeddings\n",
-"from langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint\n",
 "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
-"from langchain.vectorstores import BESVectorStore"
+"from langchain.vectorstores import BESVectorStore\n",
+"from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings\n",
+"from langchain_community.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint"
 ]
 },
 {

@@ -30,8 +30,8 @@
 "outputs": [],
 "source": [
 "import pinecone\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.vectorstores import Pinecone\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "\n",
 "pinecone.init(api_key=\"...\", environment=\"...\")"
 ]

@@ -86,7 +86,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser"
 ]
 },

@@ -42,8 +42,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.sql_database import SQLDatabase\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "CONNECTION_STRING = \"postgresql+psycopg2://postgres:test@localhost:5432/vectordb\" # Replace with your own\n",
 "db = SQLDatabase.from_uri(CONNECTION_STRING)"

@@ -88,7 +88,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.embeddings import OpenAIEmbeddings\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "\n",
 "embeddings_model = OpenAIEmbeddings()"
 ]

@@ -267,7 +267,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnablePassthrough\n",
 "\n",

@@ -31,9 +31,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
 "from langchain.utilities import DuckDuckGoSearchAPIWrapper\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnablePassthrough"
 ]

@@ -49,14 +49,14 @@
 "from langchain.agents.conversational.prompt import FORMAT_INSTRUCTIONS\n",
 "from langchain.chains import LLMChain, RetrievalQA\n",
 "from langchain.chains.base import Chain\n",
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.embeddings.openai import OpenAIEmbeddings\n",
-"from langchain.llms import BaseLLM, OpenAI\n",
 "from langchain.prompts import PromptTemplate\n",
 "from langchain.prompts.base import StringPromptTemplate\n",
 "from langchain.schema import AgentAction, AgentFinish\n",
 "from langchain.text_splitter import CharacterTextSplitter\n",
 "from langchain.vectorstores import Chroma\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.embeddings.openai import OpenAIEmbeddings\n",
+"from langchain_community.llms import BaseLLM, OpenAI\n",
 "from pydantic import BaseModel, Field"
 ]
 },

@@ -17,8 +17,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import PromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.prompt_values import PromptValue"
 ]

@@ -255,7 +255,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "model = ChatOpenAI(model=\"gpt-4\")\n",
 "res = model.predict(\n",

@@ -1083,8 +1083,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.vectorstores import ElasticsearchStore\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "\n",
 "embeddings = OpenAIEmbeddings()"
 ]

@@ -24,10 +24,10 @@
 "source": [
 "from langchain.agents import AgentExecutor, Tool, ZeroShotAgent\n",
 "from langchain.chains import LLMChain\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.memory import ConversationBufferMemory, ReadOnlySharedMemory\n",
 "from langchain.prompts import PromptTemplate\n",
-"from langchain.utilities import GoogleSearchAPIWrapper"
+"from langchain.utilities import GoogleSearchAPIWrapper\n",
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -51,8 +51,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import PromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_experimental.smart_llm import SmartLLMChain"
 ]
 },
@@ -9,7 +9,7 @@ To set it up, follow the instructions on https://database.guide/2-sample-databas
 
 
 ```python
-from langchain.llms import OpenAI
+from langchain_community.llms import OpenAI
 from langchain.utilities import SQLDatabase
 from langchain_experimental.sql import SQLDatabaseChain
 ```

@@ -200,7 +200,7 @@ result["intermediate_steps"]
 How to add memory to a SQLDatabaseChain:
 
 ```python
-from langchain.llms import OpenAI
+from langchain_community.llms import OpenAI
 from langchain.utilities import SQLDatabase
 from langchain_experimental.sql import SQLDatabaseChain
 ```

@@ -647,7 +647,7 @@ Sometimes you may not have the luxury of using OpenAI or other service-hosted la
 import logging
 import torch
 from transformers import AutoTokenizer, GPT2TokenizerFast, pipeline, AutoModelForSeq2SeqLM, AutoModelForCausalLM
-from langchain.llms import HuggingFacePipeline
+from langchain_community.llms import HuggingFacePipeline
 
 # Note: This model requires a large GPU, e.g. an 80GB A100. See documentation for other ways to run private non-OpenAI models.
 model_id = "google/flan-ul2"

@@ -994,7 +994,7 @@ Now that you have some examples (with manually corrected output SQL), you can do
 ```python
 from langchain.prompts import FewShotPromptTemplate, PromptTemplate
 from langchain.chains.sql_database.prompt import _sqlite_prompt, PROMPT_SUFFIX
-from langchain.embeddings.huggingface import HuggingFaceEmbeddings
+from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings
 from langchain.prompts.example_selector.semantic_similarity import SemanticSimilarityExampleSelector
 from langchain.vectorstores import Chroma
 

@@ -23,8 +23,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate, FewShotChatMessagePromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnableLambda"
 ]

@@ -24,7 +24,7 @@
 }
 ],
 "source": [
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "llm = OpenAI(temperature=1, max_tokens=512, model=\"gpt-3.5-turbo-instruct\")"
 ]

@@ -37,8 +37,8 @@
 "import getpass\n",
 "import os\n",
 "\n",
-"from langchain.embeddings.openai import OpenAIEmbeddings\n",
 "from langchain.vectorstores import DeepLake\n",
+"from langchain_community.embeddings.openai import OpenAIEmbeddings\n",
 "\n",
 "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API Key:\")\n",
 "activeloop_token = getpass.getpass(\"Activeloop Token:\")\n",

@@ -3809,7 +3809,7 @@
 "outputs": [],
 "source": [
 "from langchain.chains import ConversationalRetrievalChain\n",
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "model = ChatOpenAI(model_name=\"gpt-3.5-turbo-0613\") # switch to 'gpt-4'\n",
 "qa = ConversationalRetrievalChain.from_llm(model, retriever=retriever)"

@@ -24,13 +24,13 @@
 "source": [
 "from typing import Callable, List\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.memory import ConversationBufferMemory\n",
 "from langchain.schema import (\n",
 " AIMessage,\n",
 " HumanMessage,\n",
 " SystemMessage,\n",
-")"
+")\n",
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -24,11 +24,11 @@
 "source": [
 "from typing import Callable, List\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.schema import (\n",
 " HumanMessage,\n",
 " SystemMessage,\n",
-")"
+")\n",
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -599,7 +599,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "llm = ChatOpenAI(model_name=\"gpt-4\", temperature=0)"
 ]
docs/docs/_templates/integration.mdx (vendored, 6 lines changed)

@@ -32,7 +32,7 @@ There isn't any special setup for it.
 See a [usage example](/docs/integrations/llms/INCLUDE_REAL_NAME).
 
 ```python
-from langchain.llms import integration_class_REPLACE_ME
+from langchain_community.llms import integration_class_REPLACE_ME
 ```
 
 ## Text Embedding Models

@@ -40,7 +40,7 @@ from langchain.llms import integration_class_REPLACE_ME
 See a [usage example](/docs/integrations/text_embedding/INCLUDE_REAL_NAME)
 
 ```python
-from langchain.embeddings import integration_class_REPLACE_ME
+from langchain_community.embeddings import integration_class_REPLACE_ME
 ```
 
 ## Chat models

@@ -48,7 +48,7 @@ from langchain.embeddings import integration_class_REPLACE_ME
 See a [usage example](/docs/integrations/chat/INCLUDE_REAL_NAME)
 
 ```python
-from langchain.chat_models import integration_class_REPLACE_ME
+from langchain_community.chat_models import integration_class_REPLACE_ME
 ```
 
 ## Document Loader
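For context, the template above is stamped out per integration; `INCLUDE_REAL_NAME` and `integration_class_REPLACE_ME` are placeholders and stay untouched by this commit. A hypothetical filled-in instance under the new import scheme, using `FakeListLLM` (which appears elsewhere in this diff) as a stand-in class:

```python
# Hypothetical instantiation of the template's LLM import after this commit:
from langchain_community.llms import FakeListLLM

# FakeListLLM cycles through canned responses; handy for doc examples and tests.
llm = FakeListLLM(responses=["Hello from the stand-in model."])
print(llm.invoke("Anything"))
```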
@@ -20,7 +20,7 @@
 "from langchain import hub\n",
 "from langchain.agents import AgentExecutor, tool\n",
 "from langchain.agents.output_parsers import XMLAgentOutputParser\n",
-"from langchain.chat_models import ChatAnthropic"
+"from langchain_community.chat_models import ChatAnthropic"
 ]
 },
 {

@@ -17,10 +17,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import (\n",
 " ChatPromptTemplate,\n",
 ")\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_experimental.utilities import PythonREPL"
 ]

@@ -19,10 +19,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.prompts import PromptTemplate\n",
 "from langchain.utils.math import cosine_similarity\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnableLambda, RunnablePassthrough\n",
 "\n",

@@ -19,9 +19,9 @@
 "source": [
 "from operator import itemgetter\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.memory import ConversationBufferMemory\n",
 "from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.runnables import RunnableLambda, RunnablePassthrough\n",
 "\n",
 "model = ChatOpenAI()\n",

@@ -18,8 +18,8 @@
 "outputs": [],
 "source": [
 "from langchain.chains import OpenAIModerationChain\n",
-"from langchain.llms import OpenAI\n",
-"from langchain.prompts import ChatPromptTemplate"
+"from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.llms import OpenAI"
 ]
 },
 {

@@ -39,9 +39,9 @@
 "source": [
 "from operator import itemgetter\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
 "from langchain.schema import StrOutputParser\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "prompt1 = ChatPromptTemplate.from_template(\"what is the city {person} is from?\")\n",
 "prompt2 = ChatPromptTemplate.from_template(\n",

@@ -42,8 +42,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "\n",
 "prompt = ChatPromptTemplate.from_template(\"tell me a joke about {foo}\")\n",
 "model = ChatOpenAI()\n",

@@ -26,12 +26,12 @@
 "from langchain.agents import AgentExecutor, load_tools\n",
 "from langchain.agents.format_scratchpad import format_to_openai_function_messages\n",
 "from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
 "from langchain.prompts.chat import ChatPromptValue\n",
 "from langchain.tools import WikipediaQueryRun\n",
 "from langchain.tools.render import format_tool_to_openai_function\n",
-"from langchain.utilities import WikipediaAPIWrapper"
+"from langchain.utilities import WikipediaAPIWrapper\n",
+"from langchain_community.chat_models import ChatOpenAI"
 ]
 },
 {

@@ -38,10 +38,10 @@
 "source": [
 "from operator import itemgetter\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.prompts import ChatPromptTemplate\n",
 "from langchain.vectorstores import FAISS\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnableLambda, RunnablePassthrough"
 ]

@@ -93,7 +93,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnablePassthrough\n",
 "\n",

@@ -27,9 +27,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
 "from langchain.tools import DuckDuckGoSearchRun\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser"
 ]
 },

@@ -48,8 +48,8 @@
 }
 ],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "\n",
 "prompt = ChatPromptTemplate.from_template(\"tell me a short joke about {topic}\")\n",

@@ -209,7 +209,7 @@
 }
 ],
 "source": [
-"from langchain.llms import OpenAI\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "llm = OpenAI(model=\"gpt-3.5-turbo-instruct\")\n",
 "llm.invoke(prompt_value)"

@@ -324,10 +324,10 @@
 "# Requires:\n",
 "# pip install langchain docarray tiktoken\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.prompts import ChatPromptTemplate\n",
 "from langchain.vectorstores import DocArrayInMemorySearch\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnableParallel, RunnablePassthrough\n",
 "\n",
@@ -19,9 +19,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
 "from langchain.schema import StrOutputParser\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.runnables import RunnablePassthrough"
 ]
 },

@@ -41,8 +41,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import PromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.runnables import ConfigurableField\n",
 "\n",
 "model = ChatOpenAI(temperature=0).configurable_fields(\n",

@@ -263,8 +263,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatAnthropic, ChatOpenAI\n",
 "from langchain.prompts import PromptTemplate\n",
+"from langchain_community.chat_models import ChatAnthropic, ChatOpenAI\n",
 "from langchain_core.runnables import ConfigurableField"
 ]
 },

@@ -31,7 +31,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.chat_models import ChatAnthropic, ChatOpenAI"
+"from langchain_community.chat_models import ChatAnthropic, ChatOpenAI"
 ]
 },
 {

@@ -240,8 +240,8 @@
 "outputs": [],
 "source": [
 "# Now let's create a chain with the normal OpenAI model\n",
-"from langchain.llms import OpenAI\n",
 "from langchain.prompts import PromptTemplate\n",
+"from langchain_community.llms import OpenAI\n",
 "\n",
 "prompt_template = \"\"\"Instructions: You should always include a compliment in your response.\n",
 "\n",

@@ -33,8 +33,8 @@
 "source": [
 "from operator import itemgetter\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.runnables import RunnableLambda\n",
 "\n",
 "\n",

@@ -32,8 +32,8 @@
 "source": [
 "from typing import Iterator, List\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts.chat import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "\n",
 "prompt = ChatPromptTemplate.from_template(\n",

@@ -44,10 +44,10 @@
 }
 ],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.prompts import ChatPromptTemplate\n",
 "from langchain.vectorstores import FAISS\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnablePassthrough\n",
 "\n",

@@ -128,10 +128,10 @@
 "source": [
 "from operator import itemgetter\n",
 "\n",
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.prompts import ChatPromptTemplate\n",
 "from langchain.vectorstores import FAISS\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnablePassthrough\n",
 "\n",

@@ -192,8 +192,8 @@
 }
 ],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
 "from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.runnables import RunnableParallel\n",
 "\n",
 "model = ChatOpenAI()\n",

@@ -131,9 +131,9 @@
 "source": [
 "from typing import Optional\n",
 "\n",
-"from langchain.chat_models import ChatAnthropic\n",
 "from langchain.memory.chat_message_histories import RedisChatMessageHistory\n",
 "from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
+"from langchain_community.chat_models import ChatAnthropic\n",
 "from langchain_core.chat_history import BaseChatMessageHistory\n",
 "from langchain_core.runnables.history import RunnableWithMessageHistory"
 ]

@@ -97,10 +97,10 @@
 }
 ],
 "source": [
-"from langchain.chat_models import ChatOpenAI\n",
-"from langchain.embeddings import OpenAIEmbeddings\n",
 "from langchain.prompts import ChatPromptTemplate\n",
 "from langchain.vectorstores import FAISS\n",
+"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
 "from langchain_core.runnables import RunnablePassthrough\n",
 "\n",
@ -2,6 +2,7 @@
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "9e45e81c-e16e-4c6c-b6a3-2362e5193827",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"---\n",
|
||||
@ -51,8 +52,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.chat_models import ChatAnthropic\n",
|
||||
"from langchain.prompts import PromptTemplate\n",
|
||||
"from langchain_community.chat_models import ChatAnthropic\n",
|
||||
"from langchain_core.output_parsers import StrOutputParser"
|
||||
]
|
||||
},
|
||||
|
@ -57,8 +57,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.chat_models import ChatOpenAI\n",
|
||||
"from langchain.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_community.chat_models import ChatOpenAI\n",
|
||||
"\n",
|
||||
"model = ChatOpenAI()\n",
|
||||
"prompt = ChatPromptTemplate.from_template(\"tell me a joke about {topic}\")\n",
|
||||
@ -659,8 +659,8 @@
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from langchain.embeddings import OpenAIEmbeddings\n",
|
||||
"from langchain.vectorstores import FAISS\n",
|
||||
"from langchain_community.embeddings import OpenAIEmbeddings\n",
|
||||
"from langchain_core.output_parsers import StrOutputParser\n",
|
||||
"from langchain_core.runnables import RunnablePassthrough\n",
|
||||
"\n",
|
||||
|
@ -42,7 +42,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.chat_models import ChatOpenAI\n",
|
||||
"from langchain_community.chat_models import ChatOpenAI\n",
|
||||
"from langchain.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_core.output_parsers import StrOutputParser\n",
|
||||
"\n",
|
||||
@ -389,7 +389,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.llms import OpenAI\n",
|
||||
"from langchain_community.llms import OpenAI\n",
|
||||
"\n",
|
||||
"llm = OpenAI(model=\"gpt-3.5-turbo-instruct\")\n",
|
||||
"llm_chain = (\n",
|
||||
@ -468,7 +468,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.chat_models import ChatAnthropic\n",
|
||||
"from langchain_community.chat_models import ChatAnthropic\n",
|
||||
"\n",
|
||||
"anthropic = ChatAnthropic(model=\"claude-2\")\n",
|
||||
"anthropic_chain = (\n",
|
||||
@ -1002,8 +1002,8 @@
|
||||
"source": [
|
||||
"import os\n",
|
||||
"\n",
|
||||
"from langchain.chat_models import ChatAnthropic, ChatOpenAI\n",
|
||||
"from langchain.llms import OpenAI\n",
|
||||
"from langchain_community.chat_models import ChatAnthropic, ChatOpenAI\n",
|
||||
"from langchain_community.llms import OpenAI\n",
|
||||
"from langchain_core.output_parsers import StrOutputParser\n",
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"from langchain_core.runnables import RunnablePassthrough\n",
|
||||
|
@ -85,7 +85,7 @@ export OPENAI_API_KEY="..."
|
||||
We can then initialize the model:
|
||||
|
||||
```python
|
||||
from langchain.chat_models import ChatOpenAI
|
||||
from langchain_community.chat_models import ChatOpenAI
|
||||
|
||||
llm = ChatOpenAI()
|
||||
```
|
||||
@ -93,7 +93,7 @@ llm = ChatOpenAI()
If you'd prefer not to set an environment variable, you can pass the key in directly via the `openai_api_key` named parameter when initializing the OpenAI LLM class:

```python
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI

llm = ChatOpenAI(openai_api_key="...")
```
@ -110,7 +110,7 @@ First, follow [these instructions](https://github.com/jmorganca/ollama) to set u

Then, make sure the Ollama server is running. After that, you can do:
```python
from langchain.llms import Ollama
from langchain_community.llms import Ollama
llm = Ollama(model="llama2")
```
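Assuming the Ollama server is reachable on its default port, a minimal follow-up call might look like this sketch; the question string is invented for illustration and is not part of this commit:

```python
from langchain_community.llms import Ollama

# Ask the locally served llama2 model a question and print the completion.
llm = Ollama(model="llama2")
print(llm.invoke("Why is the sky blue?"))
```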

@ -412,7 +412,7 @@ pip install langchainhub
Now we can use it to get a predefined prompt:

```python
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
from langchain import hub
from langchain.agents import create_openai_functions_agent
from langchain.agents import AgentExecutor
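# Illustrative continuation, not part of this commit: pull a prompt by name.
# "hwchase17/openai-functions-agent" is an assumed hub id; any prompt id works.
prompt = hub.pull("hwchase17/openai-functions-agent")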
@ -476,14 +476,14 @@ from typing import List

from fastapi import FastAPI
from langchain.prompts import ChatPromptTemplate
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
from langchain_community.document_loaders import WebBaseLoader
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores import DocArrayInMemorySearch
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.tools.retriever import create_retriever_tool
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
from langchain import hub
from langchain.agents import create_openai_functions_agent
from langchain.agents import AgentExecutor
|
@ -25,7 +25,7 @@ Let's suppose we have a simple agent, and want to visualize the actions it takes

```python
from langchain.agents import AgentType, initialize_agent, load_tools
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI

llm = ChatOpenAI(model_name="gpt-4", temperature=0)
tools = load_tools(["ddg-search", "llm-math"], llm=llm)
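# Illustrative continuation, not part of this commit: wire the tools and model
# into a standard agent so its actions and tool outputs can be traced.
agent = initialize_agent(
    tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
)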
|
@ -120,8 +120,8 @@
"from typing import Any, Optional\n",
"\n",
"from langchain.chains import LLMChain\n",
"from langchain.chat_models import ChatAnthropic\n",
"from langchain.evaluation import PairwiseStringEvaluator\n",
"from langchain_community.chat_models import ChatAnthropic\n",
"\n",
"\n",
"class CustomPreferenceEvaluator(PairwiseStringEvaluator):\n",
|
@ -156,7 +156,7 @@
},
"outputs": [],
"source": [
"from langchain.embeddings import HuggingFaceEmbeddings\n",
"from langchain_community.embeddings import HuggingFaceEmbeddings\n",
"\n",
"embedding_model = HuggingFaceEmbeddings()\n",
"hf_evaluator = load_evaluator(\"pairwise_embedding_distance\", embeddings=embedding_model)"
|
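As a hedged usage sketch, not part of this commit, the evaluator built in the hunk above scores how close two predictions are; the strings are invented for illustration:

```python
from langchain.evaluation import load_evaluator
from langchain_community.embeddings import HuggingFaceEmbeddings

embedding_model = HuggingFaceEmbeddings()
hf_evaluator = load_evaluator("pairwise_embedding_distance", embeddings=embedding_model)

# Lower scores mean the two predictions sit closer together in embedding space.
print(
    hf_evaluator.evaluate_string_pairs(
        prediction="Seattle is mostly cloudy.",
        prediction_b="Seattle is often overcast.",
    )
)
```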
@ -236,7 +236,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.chat_models import ChatAnthropic\n",
"from langchain_community.chat_models import ChatAnthropic\n",
"\n",
"llm = ChatAnthropic(temperature=0)\n",
"\n",
|
@ -99,8 +99,8 @@
"outputs": [],
"source": [
"from langchain.agents import AgentType, Tool, initialize_agent\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.utilities import SerpAPIWrapper\n",
"from langchain_community.chat_models import ChatOpenAI\n",
"\n",
"# Initialize the language model\n",
"# You can add your own OpenAI API key by adding openai_api_key=\"<your_api_key>\"\n",
|
@ -331,7 +331,7 @@
},
"outputs": [],
"source": [
"from langchain.chat_models import ChatAnthropic\n",
"from langchain_community.chat_models import ChatAnthropic\n",
"\n",
"llm = ChatAnthropic(temperature=0)\n",
"evaluator = load_evaluator(\"criteria\", llm=llm, criteria=\"conciseness\")"
Some files were not shown because too many files have changed in this diff.