Mirror of https://github.com/hwchase17/langchain.git, synced 2025-09-02 11:39:18 +00:00
DOCS: format notebooks (#13371)
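This commit reorders the imports in the documentation notebooks' code cells into a consistent isort-style layout: standard-library modules first, then third-party packages, then local packages, each group alphabetized and separated by a blank line. As a rough illustration only, the sketch below applies that kind of ordering to every code cell of a notebook using the isort package; it assumes isort is installed and is not necessarily the tooling used for this commit.

import json

import isort


def sort_notebook_imports(path: str) -> None:
    # Rewrite a .ipynb file in place with isort-ordered imports in each code cell.
    with open(path, encoding="utf-8") as f:
        nb = json.load(f)

    for cell in nb.get("cells", []):
        if cell.get("cell_type") != "code":
            continue
        # Notebook JSON stores cell source as a list of lines ending in "\n".
        source = "".join(cell.get("source", []))
        # isort.code() returns the same code with imports grouped and alphabetized.
        cell["source"] = isort.code(source).splitlines(keepends=True)

    with open(path, "w", encoding="utf-8") as f:
        json.dump(nb, f, indent=1, ensure_ascii=False)
        f.write("\n")

Calling sort_notebook_imports("path/to/notebook.ipynb") would rewrite that notebook in place; the hunks below show the resulting reordering in the affected cells.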
@@ -221,13 +221,14 @@
"metadata": {},
"outputs": [],
"source": [
"import base64\n",
"import io\n",
"import os\n",
"import base64\n",
"\n",
"import numpy as np\n",
"from PIL import Image\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.schema.messages import HumanMessage, SystemMessage\n",
"from PIL import Image\n",
"\n",
"\n",
"def encode_image(image_path):\n",

@@ -294,7 +295,7 @@
}
],
"source": [
"from IPython.display import display, HTML\n",
"from IPython.display import HTML, display\n",
"\n",
"\n",
"def plt_img_base64(img_base64):\n",

@@ -347,11 +348,12 @@
"outputs": [],
"source": [
"import uuid\n",
"from langchain.vectorstores import Chroma\n",
"from langchain.storage import InMemoryStore\n",
"from langchain.schema.document import Document\n",
"\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
"from langchain.schema.document import Document\n",
"from langchain.storage import InMemoryStore\n",
"from langchain.vectorstores import Chroma\n",
"\n",
"# The vectorstore to use to index the child chunks\n",
"vectorstore = Chroma(\n",

@@ -524,7 +526,8 @@
"outputs": [],
"source": [
"from operator import itemgetter\n",
"from langchain.schema.runnable import RunnablePassthrough, RunnableLambda\n",
"\n",
"from langchain.schema.runnable import RunnableLambda, RunnablePassthrough\n",
"\n",
"\n",
"def prompt_func(dict):\n",

@@ -102,8 +102,9 @@
"metadata": {},
"outputs": [],
"source": [
"from pydantic import BaseModel\n",
"from typing import Any\n",
"\n",
"from pydantic import BaseModel\n",
"from unstructured.partition.pdf import partition_pdf\n",
"\n",
"# Get elements\n",

@@ -316,11 +317,12 @@
"outputs": [],
"source": [
"import uuid\n",
"from langchain.vectorstores import Chroma\n",
"from langchain.storage import InMemoryStore\n",
"from langchain.schema.document import Document\n",
"\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
"from langchain.schema.document import Document\n",
"from langchain.storage import InMemoryStore\n",
"from langchain.vectorstores import Chroma\n",
"\n",
"# The vectorstore to use to index the child chunks\n",
"vectorstore = Chroma(collection_name=\"summaries\", embedding_function=OpenAIEmbeddings())\n",

@@ -92,8 +92,9 @@
"metadata": {},
"outputs": [],
"source": [
"from pydantic import BaseModel\n",
"from typing import Any\n",
"\n",
"from pydantic import BaseModel\n",
"from unstructured.partition.pdf import partition_pdf\n",
"\n",
"# Get elements\n",

@@ -336,8 +337,8 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import glob\n",
"import os\n",
"\n",
"# Get all .txt file summaries\n",
"file_paths = glob.glob(os.path.expanduser(os.path.join(path, \"*.txt\")))\n",

@@ -371,11 +372,12 @@
"outputs": [],
"source": [
"import uuid\n",
"from langchain.vectorstores import Chroma\n",
"from langchain.storage import InMemoryStore\n",
"from langchain.schema.document import Document\n",
"\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
"from langchain.schema.document import Document\n",
"from langchain.storage import InMemoryStore\n",
"from langchain.vectorstores import Chroma\n",
"\n",
"# The vectorstore to use to index the child chunks\n",
"vectorstore = Chroma(collection_name=\"summaries\", embedding_function=OpenAIEmbeddings())\n",

@@ -82,8 +82,9 @@
"metadata": {},
"outputs": [],
"source": [
"from pydantic import BaseModel\n",
"from typing import Any\n",
"\n",
"from pydantic import BaseModel\n",
"from unstructured.partition.pdf import partition_pdf\n",
"\n",
"# Path to save images\n",

@@ -320,8 +321,8 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import glob\n",
"import os\n",
"\n",
"# Get all .txt files in the directory\n",
"file_paths = glob.glob(os.path.expanduser(os.path.join(path, \"*.txt\")))\n",

@@ -374,11 +375,12 @@
],
"source": [
"import uuid\n",
"from langchain.vectorstores import Chroma\n",
"from langchain.storage import InMemoryStore\n",
"from langchain.schema.document import Document\n",
"\n",
"from langchain.embeddings import GPT4AllEmbeddings\n",
"from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
"from langchain.schema.document import Document\n",
"from langchain.storage import InMemoryStore\n",
"from langchain.vectorstores import Chroma\n",
"\n",
"# The vectorstore to use to index the child chunks\n",
"vectorstore = Chroma(\n",

@@ -132,8 +132,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.vectorstores import Chroma\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.vectorstores import Chroma\n",
"\n",
"baseline = Chroma.from_texts(\n",
" texts=all_splits_pypdf_texts,\n",

@@ -197,12 +197,13 @@
"outputs": [],
"source": [
"# Image summary chain\n",
"import os\n",
"import base64\n",
"import io\n",
"import os\n",
"from io import BytesIO\n",
"from PIL import Image\n",
"\n",
"from langchain.schema.messages import HumanMessage\n",
"from PIL import Image\n",
"\n",
"\n",
"def encode_image(image_path):\n",

@@ -270,9 +271,10 @@
"source": [
"import uuid\n",
"from base64 import b64decode\n",
"from langchain.storage import InMemoryStore\n",
"from langchain.schema.document import Document\n",
"\n",
"from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
"from langchain.schema.document import Document\n",
"from langchain.storage import InMemoryStore\n",
"\n",
"\n",
"def create_multi_vector_retriever(\n",

@@ -363,7 +365,7 @@
}
],
"source": [
"from IPython.display import display, HTML\n",
"from IPython.display import HTML, display\n",
"\n",
"\n",
"def plt_img_base64(img_base64):\n",

@@ -472,6 +474,7 @@
"outputs": [],
"source": [
"from operator import itemgetter\n",
"\n",
"from langchain.schema.runnable import RunnablePassthrough\n",
"\n",
"# Prompt\n",

@@ -516,6 +519,7 @@
"outputs": [],
"source": [
"import re\n",
"\n",
"from langchain.schema import Document\n",
"from langchain.schema.runnable import RunnableLambda\n",
"\n",

@@ -27,10 +27,10 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.utilities import SerpAPIWrapper\n",
"from langchain.agents import Tool\n",
"from langchain.tools.file_management.write import WriteFileTool\n",
"from langchain.tools.file_management.read import ReadFileTool\n",
"from langchain.tools.file_management.write import WriteFileTool\n",
"from langchain.utilities import SerpAPIWrapper\n",
"\n",
"search = SerpAPIWrapper()\n",
"tools = [\n",

@@ -61,9 +61,9 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.vectorstores import FAISS\n",
"from langchain.docstore import InMemoryDocstore\n",
"from langchain.embeddings import OpenAIEmbeddings"
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.vectorstores import FAISS"
]
},
{

@@ -100,8 +100,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_experimental.autonomous_agents import AutoGPT\n",
"from langchain.chat_models import ChatOpenAI"
"from langchain.chat_models import ChatOpenAI\n",
"from langchain_experimental.autonomous_agents import AutoGPT"
]
},
{

@@ -34,16 +34,15 @@
"outputs": [],
"source": [
"# General\n",
"import os\n",
"import pandas as pd\n",
"from langchain_experimental.autonomous_agents import AutoGPT\n",
"from langchain.chat_models import ChatOpenAI\n",
"\n",
"from langchain.agents.agent_toolkits.pandas.base import create_pandas_dataframe_agent\n",
"from langchain.docstore.document import Document\n",
"import asyncio\n",
"import nest_asyncio\n",
"import os\n",
"\n",
"import nest_asyncio\n",
"import pandas as pd\n",
"from langchain.agents.agent_toolkits.pandas.base import create_pandas_dataframe_agent\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.docstore.document import Document\n",
"from langchain_experimental.autonomous_agents import AutoGPT\n",
"\n",
"# Needed synce jupyter runs an async eventloop\n",
"nest_asyncio.apply()"

@@ -92,6 +91,7 @@
"import os\n",
"from contextlib import contextmanager\n",
"from typing import Optional\n",
"\n",
"from langchain.agents import tool\n",
"from langchain.tools.file_management.read import ReadFileTool\n",
"from langchain.tools.file_management.write import WriteFileTool\n",

@@ -223,14 +223,13 @@
},
"outputs": [],
"source": [
"from langchain.tools import BaseTool, DuckDuckGoSearchRun\n",
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"\n",
"from pydantic import Field\n",
"from langchain.chains.qa_with_sources.loading import (\n",
" load_qa_with_sources_chain,\n",
" BaseCombineDocumentsChain,\n",
" load_qa_with_sources_chain,\n",
")\n",
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"from langchain.tools import BaseTool, DuckDuckGoSearchRun\n",
"from pydantic import Field\n",
"\n",
"\n",
"def _get_text_splitter():\n",

@@ -311,9 +310,9 @@
"source": [
"# Memory\n",
"import faiss\n",
"from langchain.vectorstores import FAISS\n",
"from langchain.docstore import InMemoryDocstore\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.vectorstores import FAISS\n",
"\n",
"embeddings_model = OpenAIEmbeddings()\n",
"embedding_size = 1536\n",

@@ -31,8 +31,8 @@
"source": [
"from typing import Optional\n",
"\n",
"from langchain.llms import OpenAI\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.llms import OpenAI\n",
"from langchain_experimental.autonomous_agents import BabyAGI"
]
},

@@ -53,8 +53,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.vectorstores import FAISS\n",
"from langchain.docstore import InMemoryDocstore"
"from langchain.docstore import InMemoryDocstore\n",
"from langchain.vectorstores import FAISS"
]
},
{
@@ -28,9 +28,9 @@
"from typing import Optional\n",
"\n",
"from langchain.chains import LLMChain\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain_experimental.autonomous_agents import BabyAGI"
]
},

@@ -62,8 +62,8 @@
"source": [
"%pip install faiss-cpu > /dev/null\n",
"%pip install google-search-results > /dev/null\n",
"from langchain.vectorstores import FAISS\n",
"from langchain.docstore import InMemoryDocstore"
"from langchain.docstore import InMemoryDocstore\n",
"from langchain.vectorstores import FAISS"
]
},
{

@@ -106,10 +106,10 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.agents import ZeroShotAgent, Tool, AgentExecutor\n",
"from langchain.agents import AgentExecutor, Tool, ZeroShotAgent\n",
"from langchain.chains import LLMChain\n",
"from langchain.llms import OpenAI\n",
"from langchain.utilities import SerpAPIWrapper\n",
"from langchain.chains import LLMChain\n",
"\n",
"todo_prompt = PromptTemplate.from_template(\n",
" \"You are a planner who is an expert at coming up with a todo list for a given objective. Come up with a todo list for this objective: {objective}\"\n",

@@ -35,16 +35,17 @@
"outputs": [],
"source": [
"from typing import List\n",
"\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts.chat import (\n",
" SystemMessagePromptTemplate,\n",
" HumanMessagePromptTemplate,\n",
" SystemMessagePromptTemplate,\n",
")\n",
"from langchain.schema import (\n",
" AIMessage,\n",
" BaseMessage,\n",
" HumanMessage,\n",
" SystemMessage,\n",
" BaseMessage,\n",
")"
]
},

@@ -47,10 +47,9 @@
"outputs": [],
"source": [
"from IPython.display import SVG\n",
"\n",
"from langchain.llms import OpenAI\n",
"from langchain_experimental.cpal.base import CPALChain\n",
"from langchain_experimental.pal_chain import PALChain\n",
"from langchain.llms import OpenAI\n",
"\n",
"llm = OpenAI(temperature=0, max_tokens=512)\n",
"cpal_chain = CPALChain.from_univariate_prompt(llm=llm, verbose=True)\n",

@@ -717,7 +717,6 @@
"source": [
"from langchain.vectorstores import DeepLake\n",
"\n",
"\n",
"username = \"<USERNAME_OR_ORG>\"\n",
"\n",
"\n",

@@ -834,8 +833,8 @@
},
"outputs": [],
"source": [
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.chains import ConversationalRetrievalChain\n",
"from langchain.chat_models import ChatOpenAI\n",
"\n",
"model = ChatOpenAI(\n",
" model_name=\"gpt-3.5-turbo-0613\"\n",

@@ -32,19 +32,20 @@
"metadata": {},
"outputs": [],
"source": [
"import re\n",
"from typing import Union\n",
"\n",
"from langchain.agents import (\n",
" AgentExecutor,\n",
" LLMSingleActionAgent,\n",
" AgentOutputParser,\n",
" LLMSingleActionAgent,\n",
")\n",
"from langchain.prompts import StringPromptTemplate\n",
"from langchain.llms import OpenAI\n",
"from langchain.chains import LLMChain\n",
"from typing import Union\n",
"from langchain.schema import AgentAction, AgentFinish\n",
"from langchain.agents.agent_toolkits import NLAToolkit\n",
"from langchain.tools.plugin import AIPlugin\n",
"import re"
"from langchain.chains import LLMChain\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts import StringPromptTemplate\n",
"from langchain.schema import AgentAction, AgentFinish\n",
"from langchain.tools.plugin import AIPlugin"
]
},
{

@@ -113,9 +114,9 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.vectorstores import FAISS\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.schema import Document"
"from langchain.schema import Document\n",
"from langchain.vectorstores import FAISS"
]
},
{

@@ -56,20 +56,21 @@
"metadata": {},
"outputs": [],
"source": [
"import re\n",
"from typing import Union\n",
"\n",
"import plugnplai\n",
"from langchain.agents import (\n",
" AgentExecutor,\n",
" LLMSingleActionAgent,\n",
" AgentOutputParser,\n",
" LLMSingleActionAgent,\n",
")\n",
"from langchain.prompts import StringPromptTemplate\n",
"from langchain.llms import OpenAI\n",
"from langchain.chains import LLMChain\n",
"from typing import Union\n",
"from langchain.schema import AgentAction, AgentFinish\n",
"from langchain.agents.agent_toolkits import NLAToolkit\n",
"from langchain.tools.plugin import AIPlugin\n",
"import re\n",
"import plugnplai"
"from langchain.chains import LLMChain\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts import StringPromptTemplate\n",
"from langchain.schema import AgentAction, AgentFinish\n",
"from langchain.tools.plugin import AIPlugin"
]
},
{

@@ -137,9 +138,9 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.vectorstores import FAISS\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.schema import Document"
"from langchain.schema import Document\n",
"from langchain.vectorstores import FAISS"
]
},
{

@@ -48,16 +48,17 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import getpass\n",
"import os\n",
"\n",
"from langchain.chains import RetrievalQA\n",
"from langchain.embeddings.openai import OpenAIEmbeddings\n",
"from langchain.llms import OpenAI\n",
"from langchain.text_splitter import (\n",
" RecursiveCharacterTextSplitter,\n",
" CharacterTextSplitter,\n",
" RecursiveCharacterTextSplitter,\n",
")\n",
"from langchain.vectorstores import DeepLake\n",
"from langchain.chains import RetrievalQA\n",
"from langchain.llms import OpenAI\n",
"\n",
"os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API Key:\")\n",
"activeloop_token = getpass.getpass(\"Activeloop Token:\")\n",

@@ -38,9 +38,8 @@
"outputs": [],
"source": [
"from elasticsearch import Elasticsearch\n",
"\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.chains.elasticsearch_database import ElasticsearchDatabaseChain"
"from langchain.chains.elasticsearch_database import ElasticsearchDatabaseChain\n",
"from langchain.chat_models import ChatOpenAI"
]
},
{

@@ -19,10 +19,11 @@
"metadata": {},
"outputs": [],
"source": [
"from typing import List, Optional\n",
"\n",
"from langchain.chains.openai_tools import create_extraction_chain_pydantic\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.pydantic_v1 import BaseModel\n",
"from typing import Optional, List\n",
"from langchain.chains.openai_tools import create_extraction_chain_pydantic"
"from langchain.pydantic_v1 import BaseModel"
]
},
{

@@ -30,9 +30,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.agents import load_tools\n",
"from langchain.agents import initialize_agent\n",
"from langchain.agents import AgentType"
"from langchain.agents import AgentType, initialize_agent, load_tools"
]
},
{

@@ -67,16 +67,16 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.schema import BaseRetriever\n",
"from typing import Any, List\n",
"\n",
"from langchain.callbacks.manager import (\n",
" AsyncCallbackManagerForRetrieverRun,\n",
" CallbackManagerForRetrieverRun,\n",
")\n",
"from langchain.utilities import GoogleSerperAPIWrapper\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.llms import OpenAI\n",
"from langchain.schema import Document\n",
"from typing import Any, List"
"from langchain.schema import BaseRetriever, Document\n",
"from langchain.utilities import GoogleSerperAPIWrapper"
]
},
{

@@ -46,14 +46,13 @@
"source": [
"from datetime import datetime, timedelta\n",
"from typing import List\n",
"from termcolor import colored\n",
"\n",
"\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.docstore import InMemoryDocstore\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.retrievers import TimeWeightedVectorStoreRetriever\n",
"from langchain.vectorstores import FAISS"
"from langchain.vectorstores import FAISS\n",
"from termcolor import colored"
]
},
{

@@ -153,6 +152,7 @@
"outputs": [],
"source": [
"import math\n",
"\n",
"import faiss\n",
"\n",
"\n",

@@ -28,12 +28,11 @@
"outputs": [],
"source": [
"import tenacity\n",
"\n",
"from langchain.output_parsers import RegexParser\n",
"from langchain.schema import (\n",
" HumanMessage,\n",
" SystemMessage,\n",
")\n",
"from langchain.output_parsers import RegexParser"
")"
]
},
{

@@ -55,9 +55,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.agents import load_tools\n",
"from langchain.agents import initialize_agent\n",
"from langchain.agents import AgentType"
"from langchain.agents import AgentType, initialize_agent, load_tools"
]
},
{

@@ -28,9 +28,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.agents import load_tools\n",
"from langchain.agents import initialize_agent\n",
"from langchain.agents import AgentType"
"from langchain.agents import AgentType, initialize_agent, load_tools"
]
},
{

@@ -20,9 +20,9 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.llms import OpenAI\n",
"from langchain.chains import HypotheticalDocumentEmbedder, LLMChain\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.chains import LLMChain, HypotheticalDocumentEmbedder\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts import PromptTemplate"
]
},
@@ -790,8 +790,8 @@
}
],
"source": [
"from langchain.prompts.prompt import PromptTemplate\n",
"from langchain.globals import set_debug\n",
"from langchain.prompts.prompt import PromptTemplate\n",
"\n",
"set_debug(True)\n",
"\n",

@@ -43,8 +43,8 @@
}
],
"source": [
"from langchain_experimental.llm_bash.base import LLMBashChain\n",
"from langchain.llms import OpenAI\n",
"from langchain_experimental.llm_bash.base import LLMBashChain\n",
"\n",
"llm = OpenAI(temperature=0)\n",
"\n",

@@ -69,8 +69,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.prompts.prompt import PromptTemplate\n",
"from langchain.chains.llm_bash.prompt import BashOutputParser\n",
"from langchain.prompts.prompt import PromptTemplate\n",
"\n",
"_PROMPT_TEMPLATE = \"\"\"If someone asks you to perform a task, your job is to come up with a series of bash commands that will perform the task. There is no need to put \"#!/bin/bash\" in your answer. Make sure to reason step by step, using this format:\n",
"Question: \"copy the files in the directory named 'target' into a new directory at the same level as target called 'myNewDirectory'\"\n",

@@ -185,7 +185,6 @@
"source": [
"from langchain_experimental.llm_bash.bash import BashProcess\n",
"\n",
"\n",
"persistent_process = BashProcess(persistent=True)\n",
"bash_chain = LLMBashChain.from_llm(llm, bash_process=persistent_process, verbose=True)\n",
"\n",

@@ -45,7 +45,8 @@
}
],
"source": [
"from langchain.llms import OpenAI\nfrom langchain.chains import LLMMathChain\n",
"from langchain.chains import LLMMathChain\n",
"from langchain.llms import OpenAI\n",
"\n",
"llm = OpenAI(temperature=0)\n",
"llm_math = LLMMathChain.from_llm(llm, verbose=True)\n",

@@ -56,10 +56,10 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.llms import OpenAI\n",
"from langchain.chains import LLMChain\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.memory import ConversationBufferWindowMemory"
"from langchain.llms import OpenAI\n",
"from langchain.memory import ConversationBufferWindowMemory\n",
"from langchain.prompts import PromptTemplate"
]
},
{

@@ -40,12 +40,13 @@
}
],
"source": [
"import os\n",
"import io\n",
"import base64\n",
"import io\n",
"import os\n",
"\n",
"import numpy as np\n",
"from IPython.display import HTML, display\n",
"from PIL import Image\n",
"from IPython.display import display, HTML\n",
"\n",
"\n",
"def encode_image(image_path):\n",

@@ -184,11 +184,12 @@
"source": [
"import os\n",
"import uuid\n",
"\n",
"import chromadb\n",
"import numpy as np\n",
"from PIL import Image as _PILImage\n",
"from langchain.vectorstores import Chroma\n",
"from langchain_experimental.open_clip import OpenCLIPEmbeddings\n",
"from PIL import Image as _PILImage\n",
"\n",
"# Create chroma\n",
"vectorstore = Chroma(\n",

@@ -233,10 +234,11 @@
"metadata": {},
"outputs": [],
"source": [
"import io\n",
"import numpy as np\n",
"import base64\n",
"import io\n",
"from io import BytesIO\n",
"\n",
"import numpy as np\n",
"from PIL import Image\n",
"\n",
"\n",

@@ -312,10 +314,11 @@
"outputs": [],
"source": [
"from operator import itemgetter\n",
"\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.schema.output_parser import StrOutputParser\n",
"from langchain.schema.runnable import RunnablePassthrough, RunnableLambda\n",
"from langchain.schema.messages import HumanMessage, SystemMessage\n",
"from langchain.schema.output_parser import StrOutputParser\n",
"from langchain.schema.runnable import RunnableLambda, RunnablePassthrough\n",
"\n",
"\n",
"def prompt_func(data_dict):\n",

@@ -421,7 +424,7 @@
}
],
"source": [
"from IPython.display import display, HTML\n",
"from IPython.display import HTML, display\n",
"\n",
"\n",
"def plt_img_base64(img_base64):\n",

@@ -29,9 +29,10 @@
"metadata": {},
"outputs": [],
"source": [
"from steamship import Block, Steamship\n",
"import re\n",
"from IPython.display import Image"
"\n",
"from IPython.display import Image\n",
"from steamship import Block, Steamship"
]
},
{

@@ -41,9 +42,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.agents import AgentType, initialize_agent\n",
"from langchain.llms import OpenAI\n",
"from langchain.agents import initialize_agent\n",
"from langchain.agents import AgentType\n",
"from langchain.tools import SteamshipImageGenerationTool"
]
},

@@ -26,7 +26,8 @@
"metadata": {},
"outputs": [],
"source": [
"from typing import List, Callable\n",
"from typing import Callable, List\n",
"\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.schema import (\n",
" HumanMessage,\n",

@@ -27,17 +27,17 @@
"metadata": {},
"outputs": [],
"source": [
"from collections import OrderedDict\n",
"import functools\n",
"import random\n",
"import tenacity\n",
"from typing import List, Callable\n",
"from collections import OrderedDict\n",
"from typing import Callable, List\n",
"\n",
"import tenacity\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.output_parsers import RegexParser\n",
"from langchain.prompts import (\n",
" PromptTemplate,\n",
")\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.output_parsers import RegexParser\n",
"from langchain.schema import (\n",
" HumanMessage,\n",
" SystemMessage,\n",

@@ -24,11 +24,12 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.prompts import PromptTemplate\n",
"from typing import Callable, List\n",
"\n",
"import tenacity\n",
"from typing import List, Callable\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.output_parsers import RegexParser\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.schema import (\n",
" HumanMessage,\n",
" SystemMessage,\n",

@@ -27,17 +27,15 @@
"metadata": {},
"outputs": [],
"source": [
"from os import environ\n",
"import getpass\n",
"from langchain.llms import OpenAI\n",
"from langchain.utilities import SQLDatabase\n",
"from os import environ\n",
"\n",
"from langchain.chains import LLMChain\n",
"from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n",
"from sqlalchemy import create_engine, MetaData\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"\n",
"\n",
"from sqlalchemy import create_engine\n",
"from langchain.utilities import SQLDatabase\n",
"from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n",
"from sqlalchemy import MetaData, create_engine\n",
"\n",
"MYSCALE_HOST = \"msc-4a9e710a.us-east-1.aws.staging.myscale.cloud\"\n",
"MYSCALE_PORT = 443\n",

@@ -76,9 +74,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.llms import OpenAI\n",
"from langchain.callbacks import StdOutCallbackHandler\n",
"\n",
"from langchain.llms import OpenAI\n",
"from langchain.utilities.sql_database import SQLDatabase\n",
"from langchain_experimental.sql.prompt import MYSCALE_PROMPT\n",
"from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n",

@@ -119,15 +116,16 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesChain\n",
"\n",
"from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain_experimental.retrievers.vector_sql_database import (\n",
" VectorSQLDatabaseChainRetriever,\n",
")\n",
"from langchain_experimental.sql.prompt import MYSCALE_PROMPT\n",
"from langchain_experimental.sql.vector_sql import VectorSQLRetrieveAllOutputParser\n",
"from langchain_experimental.sql.vector_sql import (\n",
" VectorSQLDatabaseChain,\n",
" VectorSQLRetrieveAllOutputParser,\n",
")\n",
"\n",
"output_parser_retrieve_all = VectorSQLRetrieveAllOutputParser.from_embeddings(\n",
" output_parser.model\n",

@@ -50,10 +50,10 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.chains import create_qa_with_sources_chain\n",
"from langchain.chains.combine_documents.stuff import StuffDocumentsChain\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.chains import create_qa_with_sources_chain"
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts import PromptTemplate"
]
},
{

@@ -230,9 +230,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.chains import ConversationalRetrievalChain\n",
"from langchain.chains import ConversationalRetrievalChain, LLMChain\n",
"from langchain.memory import ConversationBufferMemory\n",
"from langchain.chains import LLMChain\n",
"\n",
"memory = ConversationBufferMemory(memory_key=\"chat_history\", return_messages=True)\n",
"_template = \"\"\"Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.\\\n",

@@ -357,12 +356,10 @@
"source": [
"from typing import List\n",
"\n",
"from pydantic import BaseModel, Field\n",
"\n",
"from langchain.chains.openai_functions import create_qa_with_structure_chain\n",
"\n",
"from langchain.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate\n",
"from langchain.schema import SystemMessage, HumanMessage"
"from langchain.schema import HumanMessage, SystemMessage\n",
"from pydantic import BaseModel, Field"
]
},
{

@@ -167,7 +167,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.tools import E2BDataAnalysisTool, DuckDuckGoSearchRun\n",
"from langchain.tools import DuckDuckGoSearchRun, E2BDataAnalysisTool\n",
"\n",
"tools = [E2BDataAnalysisTool(api_key=\"...\"), DuckDuckGoSearchRun()]"
]

@@ -456,9 +456,9 @@
"from typing import Literal\n",
"\n",
"from langchain.output_parsers.openai_tools import PydanticToolsParser\n",
"from langchain.utils.openai_functions import convert_pydantic_to_openai_tool\n",
"from langchain.prompts import ChatPromptTemplate\n",
"from langchain.pydantic_v1 import BaseModel, Field\n",
"from langchain.utils.openai_functions import convert_pydantic_to_openai_tool\n",
"\n",
"\n",
"class GetCurrentWeather(BaseModel):\n",

@@ -45,14 +45,14 @@
"source": [
"import collections\n",
"import inspect\n",
"import tenacity\n",
"\n",
"import tenacity\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.output_parsers import RegexParser\n",
"from langchain.schema import (\n",
" HumanMessage,\n",
" SystemMessage,\n",
")\n",
"from langchain.output_parsers import RegexParser"
")"
]
},
{
@@ -17,8 +17,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_experimental.pal_chain import PALChain\n",
"from langchain.llms import OpenAI"
"from langchain.llms import OpenAI\n",
"from langchain_experimental.pal_chain import PALChain"
]
},
{

@@ -54,13 +54,13 @@
"metadata": {},
"outputs": [],
"source": [
"from baidubce.bce_client_configuration import BceClientConfiguration\n",
"from baidubce.auth.bce_credentials import BceCredentials\n",
"from baidubce.bce_client_configuration import BceClientConfiguration\n",
"from langchain.document_loaders.baiducloud_bos_directory import BaiduBOSDirectoryLoader\n",
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"from langchain.embeddings.huggingface import HuggingFaceEmbeddings\n",
"from langchain.vectorstores import BESVectorStore\n",
"from langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint"
"from langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint\n",
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"from langchain.vectorstores import BESVectorStore"
]
},
{

@@ -30,8 +30,8 @@
"outputs": [],
"source": [
"import pinecone\n",
"from langchain.vectorstores import Pinecone\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"from langchain.vectorstores import Pinecone\n",
"\n",
"pinecone.init(api_key=\"...\", environment=\"...\")"
]

@@ -28,8 +28,8 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import getpass\n",
"import os\n",
"\n",
"os.environ[\"OPENAI_API_KEY\"] = os.environ.get(\"OPENAI_API_KEY\") or getpass.getpass(\n",
" \"OpenAI API Key:\"\n",

@@ -42,8 +42,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.sql_database import SQLDatabase\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.sql_database import SQLDatabase\n",
"\n",
"CONNECTION_STRING = \"postgresql+psycopg2://postgres:test@localhost:5432/vectordb\" # Replace with your own\n",
"db = SQLDatabase.from_uri(CONNECTION_STRING)"

@@ -323,6 +323,7 @@
"outputs": [],
"source": [
"import re\n",
"\n",
"from langchain.schema.runnable import RunnableLambda\n",
"\n",
"\n",

@@ -31,8 +31,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.prompts import ChatPromptTemplate\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts import ChatPromptTemplate\n",
"from langchain.schema.output_parser import StrOutputParser\n",
"from langchain.schema.runnable import RunnablePassthrough\n",
"from langchain.utilities import DuckDuckGoSearchAPIWrapper"

@@ -42,22 +42,22 @@
"OPENAI_API_KEY = \"sk-xx\"\n",
"os.environ[\"OPENAI_API_KEY\"] = OPENAI_API_KEY\n",
"\n",
"from typing import Dict, List, Any, Union, Callable\n",
"from pydantic import BaseModel, Field\n",
"from langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n",
"from langchain.llms import BaseLLM\n",
"from langchain.chains.base import Chain\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.agents import Tool, LLMSingleActionAgent, AgentExecutor\n",
"from langchain.text_splitter import CharacterTextSplitter\n",
"from langchain.embeddings.openai import OpenAIEmbeddings\n",
"from langchain.chains import RetrievalQA\n",
"from langchain.vectorstores import Chroma\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts.base import StringPromptTemplate\n",
"from typing import Any, Callable, Dict, List, Union\n",
"\n",
"from langchain.agents import AgentExecutor, LLMSingleActionAgent, Tool\n",
"from langchain.agents.agent import AgentOutputParser\n",
"from langchain.agents.conversational.prompt import FORMAT_INSTRUCTIONS\n",
"from langchain.schema import AgentAction, AgentFinish"
"from langchain.chains import LLMChain, RetrievalQA\n",
"from langchain.chains.base import Chain\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.embeddings.openai import OpenAIEmbeddings\n",
"from langchain.llms import BaseLLM, OpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.prompts.base import StringPromptTemplate\n",
"from langchain.schema import AgentAction, AgentFinish\n",
"from langchain.text_splitter import CharacterTextSplitter\n",
"from langchain.vectorstores import Chroma\n",
"from pydantic import BaseModel, Field"
]
},
{

@@ -17,10 +17,10 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.prompts import PromptTemplate\n",
"from langchain.schema.prompt import PromptValue\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.schema.output_parser import StrOutputParser"
"from langchain.prompts import PromptTemplate\n",
"from langchain.schema.output_parser import StrOutputParser\n",
"from langchain.schema.prompt import PromptValue"
]
},
{

@@ -51,8 +51,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.prompts import PromptTemplate\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain_experimental.smart_llm import SmartLLMChain"
]
},

@@ -131,7 +131,6 @@
"source": [
"from langchain.utilities import DuckDuckGoSearchAPIWrapper\n",
"\n",
"\n",
"search = DuckDuckGoSearchAPIWrapper(max_results=4)\n",
"\n",
"\n",

@@ -84,10 +84,11 @@
"metadata": {},
"outputs": [],
"source": [
"import re\n",
"from typing import Tuple\n",
"\n",
"from langchain_experimental.tot.checker import ToTChecker\n",
"from langchain_experimental.tot.thought import ThoughtValidity\n",
"import re\n",
"\n",
"\n",
"class MyChecker(ToTChecker):\n",

@@ -34,8 +34,8 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import getpass\n",
"import os\n",
"\n",
"from langchain.embeddings.openai import OpenAIEmbeddings\n",
"from langchain.vectorstores import DeepLake\n",

@@ -109,6 +109,7 @@
"outputs": [],
"source": [
"import os\n",
"\n",
"from langchain.document_loaders import TextLoader\n",
"\n",
"root_dir = \"./the-algorithm\"\n",

@@ -3807,8 +3808,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.chains import ConversationalRetrievalChain\n",
"from langchain.chat_models import ChatOpenAI\n",
"\n",
"model = ChatOpenAI(model_name=\"gpt-3.5-turbo-0613\") # switch to 'gpt-4'\n",
"qa = ConversationalRetrievalChain.from_llm(model, retriever=retriever)"

@@ -22,7 +22,8 @@
"metadata": {},
"outputs": [],
"source": [
"from typing import List, Callable\n",
"from typing import Callable, List\n",
"\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.memory import ConversationBufferMemory\n",
"from langchain.schema import (\n",

@@ -45,9 +46,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.agents import initialize_agent\n",
"from langchain.agents import AgentType\n",
"from langchain.agents import load_tools"
"from langchain.agents import AgentType, initialize_agent, load_tools"
]
},
{

@@ -22,7 +22,8 @@
"metadata": {},
"outputs": [],
"source": [
"from typing import List, Callable\n",
"from typing import Callable, List\n",
"\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.schema import (\n",
" HumanMessage,\n",

@@ -192,10 +192,10 @@
" return current\n",
"\n",
"\n",
"import requests\n",
"\n",
"from typing import Optional\n",
"\n",
"import requests\n",
"\n",
"\n",
"def vocab_lookup(\n",
" search: str,\n",

@@ -319,9 +319,10 @@
"metadata": {},
"outputs": [],
"source": [
"import requests\n",
"from typing import List, Dict, Any\n",
"import json\n",
"from typing import Any, Dict, List\n",
"\n",
"import requests\n",
"\n",
"\n",
"def run_sparql(\n",

@@ -389,17 +390,18 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.agents import (\n",
" Tool,\n",
" AgentExecutor,\n",
" LLMSingleActionAgent,\n",
" AgentOutputParser,\n",
")\n",
"from langchain.prompts import StringPromptTemplate\n",
"from langchain.chains import LLMChain\n",
"import re\n",
"from typing import List, Union\n",
"from langchain.schema import AgentAction, AgentFinish\n",
"import re"
"\n",
"from langchain.agents import (\n",
" AgentExecutor,\n",
" AgentOutputParser,\n",
" LLMSingleActionAgent,\n",
" Tool,\n",
")\n",
"from langchain.chains import LLMChain\n",
"from langchain.prompts import StringPromptTemplate\n",
"from langchain.schema import AgentAction, AgentFinish"
]
},
{

@@ -17,7 +17,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.agents import XMLAgent, tool, AgentExecutor\n",
"from langchain.agents import AgentExecutor, XMLAgent, tool\n",
"from langchain.chat_models import ChatAnthropic"
]
},

@@ -26,7 +26,6 @@
"from langchain.schema.runnable import RunnableLambda, RunnablePassthrough\n",
"from langchain.utils.math import cosine_similarity\n",
"\n",
"\n",
"physics_template = \"\"\"You are a very smart physics professor. \\\n",
"You are great at answering questions about physics in a concise and easy to understand manner. \\\n",
"When you don't know the answer to a question you admit that you don't know.\n",

@@ -18,10 +18,11 @@
"outputs": [],
"source": [
"from operator import itemgetter\n",
"\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.memory import ConversationBufferMemory\n",
"from langchain.schema.runnable import RunnablePassthrough, RunnableLambda\n",
"from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
"from langchain.schema.runnable import RunnableLambda, RunnablePassthrough\n",
"\n",
"model = ChatOpenAI()\n",
"prompt = ChatPromptTemplate.from_messages(\n",

@@ -42,8 +42,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.prompts import ChatPromptTemplate\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts import ChatPromptTemplate\n",
"\n",
"prompt = ChatPromptTemplate.from_template(\"tell me a joke about {foo}\")\n",
"model = ChatOpenAI()\n",
@@ -38,11 +38,11 @@
|
||||
"source": [
|
||||
"from operator import itemgetter\n",
|
||||
"\n",
|
||||
"from langchain.prompts import ChatPromptTemplate\n",
|
||||
"from langchain.chat_models import ChatOpenAI\n",
|
||||
"from langchain.embeddings import OpenAIEmbeddings\n",
|
||||
"from langchain.prompts import ChatPromptTemplate\n",
|
||||
"from langchain.schema.output_parser import StrOutputParser\n",
|
||||
"from langchain.schema.runnable import RunnablePassthrough, RunnableLambda\n",
|
||||
"from langchain.schema.runnable import RunnableLambda, RunnablePassthrough\n",
|
||||
"from langchain.vectorstores import FAISS"
|
||||
]
|
||||
},
|
||||
@@ -170,8 +170,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.schema.runnable import RunnableMap\n",
|
||||
"from langchain.schema import format_document"
|
||||
"from langchain.schema import format_document\n",
|
||||
"from langchain.schema.runnable import RunnableMap"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -231,7 +231,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from typing import Tuple, List\n",
|
||||
"from typing import List, Tuple\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"def _format_chat_history(chat_history: List[Tuple]) -> str:\n",
|
||||
@@ -335,6 +335,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from operator import itemgetter\n",
|
||||
"\n",
|
||||
"from langchain.memory import ConversationBufferMemory"
|
||||
]
|
||||
},
|
||||
|
@@ -262,9 +262,9 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.chat_models import ChatOpenAI, ChatAnthropic\n",
|
||||
"from langchain.schema.runnable import ConfigurableField\n",
|
||||
"from langchain.prompts import PromptTemplate"
|
||||
"from langchain.chat_models import ChatAnthropic, ChatOpenAI\n",
|
||||
"from langchain.prompts import PromptTemplate\n",
|
||||
"from langchain.schema.runnable import ConfigurableField"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@@ -31,7 +31,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.chat_models import ChatOpenAI, ChatAnthropic"
|
||||
"from langchain.chat_models import ChatAnthropic, ChatOpenAI"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -50,6 +50,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from unittest.mock import patch\n",
|
||||
"\n",
|
||||
"from openai.error import RateLimitError"
|
||||
]
|
||||
},
|
||||
|
@@ -19,11 +19,12 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.schema.runnable import RunnableLambda\n",
|
||||
"from langchain.prompts import ChatPromptTemplate\n",
|
||||
"from langchain.chat_models import ChatOpenAI\n",
|
||||
"from operator import itemgetter\n",
|
||||
"\n",
|
||||
"from langchain.chat_models import ChatOpenAI\n",
|
||||
"from langchain.prompts import ChatPromptTemplate\n",
|
||||
"from langchain.schema.runnable import RunnableLambda\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"def length_function(text):\n",
|
||||
" return len(text)\n",
|
||||
@@ -91,8 +92,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.schema.runnable import RunnableConfig\n",
|
||||
"from langchain.schema.output_parser import StrOutputParser"
|
||||
"from langchain.schema.output_parser import StrOutputParser\n",
|
||||
"from langchain.schema.runnable import RunnableConfig"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@@ -29,7 +29,6 @@
|
||||
"from langchain.prompts.chat import ChatPromptTemplate\n",
|
||||
"from langchain.schema.output_parser import StrOutputParser\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"prompt = ChatPromptTemplate.from_template(\n",
|
||||
" \"Write a comma-separated list of 5 animals similar to: {animal}\"\n",
|
||||
")\n",
|
||||
|
@@ -33,7 +33,6 @@
|
||||
"from langchain.prompts import ChatPromptTemplate\n",
|
||||
"from langchain.schema.runnable import RunnableParallel\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"model = ChatOpenAI()\n",
|
||||
"joke_chain = ChatPromptTemplate.from_template(\"tell me a joke about {topic}\") | model\n",
|
||||
"poem_chain = (\n",
|
||||
|
@@ -40,8 +40,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.prompts import PromptTemplate\n",
|
||||
"from langchain.chat_models import ChatAnthropic\n",
|
||||
"from langchain.prompts import PromptTemplate\n",
|
||||
"from langchain.schema.output_parser import StrOutputParser"
|
||||
]
|
||||
},
|
||||
|
@@ -57,8 +57,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.prompts import ChatPromptTemplate\n",
|
||||
"from langchain.chat_models import ChatOpenAI\n",
|
||||
"from langchain.prompts import ChatPromptTemplate\n",
|
||||
"\n",
|
||||
"model = ChatOpenAI()\n",
|
||||
"prompt = ChatPromptTemplate.from_template(\"tell me a joke about {topic}\")\n",
|
||||
|
@@ -34,7 +34,8 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from typing import Optional, Any\n",
|
||||
"from typing import Any, Optional\n",
|
||||
"\n",
|
||||
"from langchain.evaluation import PairwiseStringEvaluator\n",
|
||||
"\n",
|
||||
"\n",
|
||||
@@ -116,10 +117,11 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from typing import Optional, Any\n",
|
||||
"from langchain.evaluation import PairwiseStringEvaluator\n",
|
||||
"from langchain.chat_models import ChatAnthropic\n",
|
||||
"from typing import Any, Optional\n",
|
||||
"\n",
|
||||
"from langchain.chains import LLMChain\n",
|
||||
"from langchain.chat_models import ChatAnthropic\n",
|
||||
"from langchain.evaluation import PairwiseStringEvaluator\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class CustomPreferenceEvaluator(PairwiseStringEvaluator):\n",
|
||||
|
@@ -98,11 +98,9 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain.utilities import SerpAPIWrapper\n",
|
||||
"from langchain.agents import initialize_agent, Tool\n",
|
||||
"from langchain.agents import AgentType\n",
|
||||
"from langchain.agents import AgentType, Tool, initialize_agent\n",
|
||||
"from langchain.chat_models import ChatOpenAI\n",
|
||||
"\n",
|
||||
"from langchain.utilities import SerpAPIWrapper\n",
|
||||
"\n",
|
||||
"# Initialize the language model\n",
|
||||
"# You can add your own OpenAI API key by adding openai_api_key=\"<your_api_key>\"\n",
|
||||
@@ -171,9 +169,10 @@
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from tqdm.notebook import tqdm\n",
|
||||
"import asyncio\n",
|
||||
"\n",
|
||||
"from tqdm.notebook import tqdm\n",
|
||||
"\n",
|
||||
"results = []\n",
|
||||
"agents = [functions_agent, conversations_agent]\n",
|
||||
"concurrency_level = 6 # How many concurrent agents to run. May need to decrease if OpenAI is rate limiting.\n",
|
||||
|
@@ -37,8 +37,8 @@
"source": [
"from typing import Any, Optional\n",
"\n",
"from langchain.evaluation import StringEvaluator\n",
"from evaluate import load\n",
"from langchain.evaluation import StringEvaluator\n",
"\n",
"\n",
"class PerplexityEvaluator(StringEvaluator):\n",
@@ -24,8 +24,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.evaluation import load_evaluator\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.evaluation import load_evaluator\n",
"\n",
"evaluator = load_evaluator(\"labeled_score_string\", llm=ChatOpenAI(model=\"gpt-4\"))"
]
@@ -22,10 +22,11 @@
"outputs": [],
"source": [
"from typing import Any, Optional, Sequence, Tuple\n",
"from langchain.chat_models import ChatOpenAI\n",
"\n",
"from langchain.chains import LLMChain\n",
"from langchain.schema import AgentAction\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.evaluation import AgentTrajectoryEvaluator\n",
"from langchain.schema import AgentAction\n",
"\n",
"\n",
"class StepNecessityEvaluator(AgentTrajectoryEvaluator):\n",
@@ -72,13 +72,12 @@
"outputs": [],
"source": [
"import subprocess\n",
"from urllib.parse import urlparse\n",
"\n",
"from langchain.agents import AgentType, initialize_agent\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.tools import tool\n",
"from langchain.agents import AgentType, initialize_agent\n",
"\n",
"from pydantic import HttpUrl\n",
"from urllib.parse import urlparse\n",
"\n",
"\n",
"@tool\n",
@@ -33,7 +33,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.chat_models import ChatOpenAI, ChatAnthropic"
"from langchain.chat_models import ChatAnthropic, ChatOpenAI"
]
},
{
@@ -52,6 +52,7 @@
"outputs": [],
"source": [
"from unittest.mock import patch\n",
"\n",
"from openai.error import RateLimitError"
]
},
@@ -482,9 +482,9 @@
}
],
"source": [
"from langchain.prompts import PromptTemplate\n",
"from langchain.chains import LLMChain\n",
"from langchain.chains.prompt_selector import ConditionalPromptSelector\n",
"from langchain.prompts import PromptTemplate\n",
"\n",
"DEFAULT_LLAMA_SEARCH_PROMPT = PromptTemplate(\n",
"    input_variables=[\"question\"],\n",
@@ -19,9 +19,9 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.llms import OpenAI, Cohere, HuggingFaceHub\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.model_laboratory import ModelLaboratory"
"from langchain.llms import Cohere, HuggingFaceHub, OpenAI\n",
"from langchain.model_laboratory import ModelLaboratory\n",
"from langchain.prompts import PromptTemplate"
]
},
{
@@ -127,8 +127,8 @@
}
],
"source": [
"from langchain.prompts.prompt import PromptTemplate\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts.prompt import PromptTemplate\n",
"\n",
"anonymizer = PresidioAnonymizer()\n",
"\n",
@@ -286,7 +286,6 @@
"# Define the regex pattern in a Presidio `Pattern` object:\n",
"from presidio_analyzer import Pattern, PatternRecognizer\n",
"\n",
"\n",
"polish_phone_numbers_pattern = Pattern(\n",
"    name=\"polish_phone_numbers_pattern\",\n",
"    regex=\"(?<!\\w)(\\(?(\\+|00)?48\\)?)?[ -]?\\d{3}[ -]?\\d{3}[ -]?\\d{3}(?!\\w)\",\n",
@@ -233,7 +233,6 @@
"# Define the regex pattern in a Presidio `Pattern` object:\n",
"from presidio_analyzer import Pattern, PatternRecognizer\n",
"\n",
"\n",
"polish_id_pattern = Pattern(\n",
"    name=\"polish_id_pattern\",\n",
"    regex=\"[A-Z]{3}\\d{6}\",\n",
@@ -638,8 +637,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"from langchain.embeddings.openai import OpenAIEmbeddings\n",
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"from langchain.vectorstores import FAISS\n",
"\n",
"# 2. Load the data: In our case data's already loaded\n",
@@ -664,12 +663,11 @@
"outputs": [],
"source": [
"from operator import itemgetter\n",
"\n",
"from langchain.chat_models.openai import ChatOpenAI\n",
"from langchain.schema.runnable import RunnableMap\n",
"from langchain.prompts import ChatPromptTemplate\n",
"from langchain.schema.output_parser import StrOutputParser\n",
"from langchain.schema.runnable import RunnablePassthrough\n",
"from langchain.schema.runnable import RunnableLambda\n",
"from langchain.schema.runnable import RunnableLambda, RunnableMap, RunnablePassthrough\n",
"\n",
"# 6. Create anonymizer chain\n",
"template = \"\"\"Answer the question based only on the following context:\n",
@@ -207,8 +207,8 @@
}
],
"source": [
"from langchain.prompts.prompt import PromptTemplate\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts.prompt import PromptTemplate\n",
"\n",
"anonymizer = PresidioReversibleAnonymizer()\n",
"\n",
@@ -60,9 +60,10 @@
},
"outputs": [],
"source": [
"import boto3\n",
"import os\n",
"\n",
"import boto3\n",
"\n",
"comprehend_client = boto3.client(\"comprehend\", region_name=\"us-east-1\")"
]
},
@@ -102,8 +103,8 @@
},
"outputs": [],
"source": [
"from langchain.prompts import PromptTemplate\n",
"from langchain.llms.fake import FakeListLLM\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain_experimental.comprehend_moderation.base_moderation_exceptions import (\n",
"    ModerationPiiError,\n",
")\n",
@@ -174,8 +175,8 @@
"source": [
"from langchain_experimental.comprehend_moderation import (\n",
"    BaseModerationConfig,\n",
"    ModerationPromptSafetyConfig,\n",
"    ModerationPiiConfig,\n",
"    ModerationPromptSafetyConfig,\n",
"    ModerationToxicityConfig,\n",
")\n",
"\n",
@@ -239,8 +240,8 @@
},
"outputs": [],
"source": [
"from langchain.prompts import PromptTemplate\n",
"from langchain.llms.fake import FakeListLLM\n",
"from langchain.prompts import PromptTemplate\n",
"\n",
"template = \"\"\"Question: {question}\n",
"\n",
@@ -402,8 +403,8 @@
},
"outputs": [],
"source": [
"from langchain.prompts import PromptTemplate\n",
"from langchain.llms.fake import FakeListLLM\n",
"from langchain.prompts import PromptTemplate\n",
"\n",
"template = \"\"\"Question: {question}\n",
"\n",
@@ -691,10 +692,11 @@
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"from langchain.llms import SagemakerEndpoint\n",
"from langchain.llms.sagemaker_endpoint import LLMContentHandler\n",
"from langchain.prompts import PromptTemplate\n",
"import json\n",
"\n",
"\n",
"class ContentHandler(LLMContentHandler):\n",
@@ -144,8 +144,8 @@
}
],
"source": [
"from langchain.agents import AgentType, initialize_agent\n",
"from langchain.llms import OpenAI\n",
"from langchain.agents import initialize_agent, AgentType\n",
"\n",
"llm = OpenAI(temperature=0)\n",
"agent = initialize_agent(\n",
@@ -281,8 +281,8 @@
],
"source": [
"from langchain.callbacks import ArgillaCallbackHandler, StdOutCallbackHandler\n",
"from langchain.llms import OpenAI\n",
"from langchain.chains import LLMChain\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"\n",
"argilla_callback = ArgillaCallbackHandler(\n",
@@ -104,12 +104,12 @@
"source": [
"import os\n",
"\n",
"from langchain.callbacks import ContextCallbackHandler\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.schema import (\n",
"    SystemMessage,\n",
"    HumanMessage,\n",
"    SystemMessage,\n",
")\n",
"from langchain.callbacks import ContextCallbackHandler\n",
"\n",
"token = os.environ[\"CONTEXT_API_TOKEN\"]\n",
"\n",
@@ -162,14 +162,14 @@
"source": [
"import os\n",
"\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.callbacks import ContextCallbackHandler\n",
"from langchain.chains import LLMChain\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.prompts.chat import (\n",
"    ChatPromptTemplate,\n",
"    HumanMessagePromptTemplate,\n",
")\n",
"from langchain.callbacks import ContextCallbackHandler\n",
"\n",
"token = os.environ[\"CONTEXT_API_TOKEN\"]\n",
"\n",
@@ -46,13 +46,13 @@
"source": [
"import datetime as dt\n",
"import json\n",
"from langchain.llms import OpenAI\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib.dates as md\n",
"import time\n",
"\n",
"import matplotlib.dates as md\n",
"import matplotlib.pyplot as plt\n",
"from infinopy import InfinoClient\n",
"from langchain.callbacks import InfinoCallbackHandler"
"from langchain.callbacks import InfinoCallbackHandler\n",
"from langchain.llms import OpenAI"
]
},
{
@@ -313,9 +313,9 @@
"# Set your key here.\n",
"# os.environ[\"OPENAI_API_KEY\"] = \"YOUR_API_KEY\"\n",
"\n",
"from langchain.chains.summarize import load_summarize_chain\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.document_loaders import WebBaseLoader\n",
"from langchain.chains.summarize import load_summarize_chain\n",
"\n",
"# Create callback handler. This logs latency, errors, token usage, prompts, as well as prompt responses to Infino.\n",
"handler = InfinoCallbackHandler(\n",
@@ -170,8 +170,8 @@
},
"outputs": [],
"source": [
"from langchain.llms import OpenAI\n",
"from langchain.callbacks import LabelStudioCallbackHandler\n",
"from langchain.llms import OpenAI\n",
"\n",
"llm = OpenAI(\n",
"    temperature=0, callbacks=[LabelStudioCallbackHandler(project_name=\"My Project\")]\n",
@@ -241,9 +241,9 @@
},
"outputs": [],
"source": [
"from langchain.callbacks import LabelStudioCallbackHandler\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.schema import HumanMessage, SystemMessage\n",
"from langchain.callbacks import LabelStudioCallbackHandler\n",
"\n",
"chat_llm = ChatOpenAI(\n",
"    callbacks=[\n",
@@ -110,7 +110,6 @@
"source": [
"import promptlayer  # Don't forget this 🍰\n",
"from langchain.callbacks import PromptLayerCallbackHandler\n",
"\n",
"from langchain.llms import GPT4All\n",
"\n",
"model = GPT4All(model=\"./models/gpt4all-model.bin\", n_ctx=512, n_threads=8)\n",
@@ -76,15 +76,14 @@
},
"outputs": [],
"source": [
"from langchain.llms import OpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.chains import LLMChain, SimpleSequentialChain\n",
"from langchain.agents import initialize_agent, load_tools\n",
"from langchain.callbacks import SageMakerCallbackHandler\n",
"\n",
"from langchain.chains import LLMChain, SimpleSequentialChain\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"from sagemaker.analytics import ExperimentAnalytics\n",
"from sagemaker.session import Session\n",
"from sagemaker.experiments.run import Run"
"from sagemaker.experiments.run import Run\n",
"from sagemaker.session import Session"
]
},
{
@@ -147,8 +147,8 @@
},
"outputs": [],
"source": [
"from langchain.llms import OpenAI\n",
"from langchain.callbacks import TrubricsCallbackHandler"
"from langchain.callbacks import TrubricsCallbackHandler\n",
"from langchain.llms import OpenAI"
]
},
{
@@ -265,9 +265,9 @@
},
"outputs": [],
"source": [
"from langchain.callbacks import TrubricsCallbackHandler\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.schema import HumanMessage, SystemMessage\n",
"from langchain.callbacks import TrubricsCallbackHandler"
"from langchain.schema import HumanMessage, SystemMessage"
]
},
{
@@ -17,14 +17,14 @@
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"source": [
"# !pip install openai"
],
"id": "d00d850917865298",
"metadata": {
"collapsed": false
},
"id": "d00d850917865298"
"outputs": [],
"source": [
"# !pip install openai"
]
},
{
"cell_type": "code",
@@ -99,7 +99,7 @@
"source": [
"import asyncio\n",
"\n",
"from langchain.schema import SystemMessage, HumanMessage\n",
"from langchain.schema import HumanMessage, SystemMessage\n",
"\n",
"messages = [\n",
"    SystemMessage(content=\"You are a helpful AI that shares everything you know.\"),\n",
@@ -59,9 +59,10 @@
],
"source": [
"\"\"\"For basic init and call\"\"\"\n",
"import os\n",
"\n",
"from langchain.chat_models import QianfanChatEndpoint\n",
"from langchain.chat_models.base import HumanMessage\n",
"import os\n",
"\n",
"os.environ[\"QIANFAN_AK\"] = \"your_ak\"\n",
"os.environ[\"QIANFAN_SK\"] = \"your_sk\"\n",
@@ -64,7 +64,7 @@
],
"source": [
"from langchain.chat_models import ChatEverlyAI\n",
"from langchain.schema import SystemMessage, HumanMessage\n",
"from langchain.schema import HumanMessage, SystemMessage\n",
"\n",
"messages = [\n",
"    SystemMessage(content=\"You are a helpful AI that shares everything you know.\"),\n",
@@ -116,9 +116,9 @@
}
],
"source": [
"from langchain.chat_models import ChatEverlyAI\n",
"from langchain.schema import SystemMessage, HumanMessage\n",
"from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n",
"from langchain.chat_models import ChatEverlyAI\n",
"from langchain.schema import HumanMessage, SystemMessage\n",
"\n",
"messages = [\n",
"    SystemMessage(content=\"You are a humorous AI that delights people.\"),\n",
@@ -174,9 +174,9 @@
}
],
"source": [
"from langchain.chat_models import ChatEverlyAI\n",
"from langchain.schema import SystemMessage, HumanMessage\n",
"from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n",
"from langchain.chat_models import ChatEverlyAI\n",
"from langchain.schema import HumanMessage, SystemMessage\n",
"\n",
"messages = [\n",
"    SystemMessage(content=\"You are a humorous AI that delights people.\"),\n",
@@ -25,9 +25,10 @@
},
"outputs": [],
"source": [
"import os\n",
"\n",
"from langchain.chat_models.fireworks import ChatFireworks\n",
"from langchain.schema import SystemMessage, HumanMessage\n",
"import os"
"from langchain.schema import HumanMessage, SystemMessage"
]
},
{
@@ -49,8 +50,8 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import getpass\n",
"import os\n",
"\n",
"if \"FIREWORKS_API_KEY\" not in os.environ:\n",
"    os.environ[\"FIREWORKS_API_KEY\"] = getpass.getpass(\"Fireworks API Key:\")\n",
@@ -147,8 +148,8 @@
"source": [
"from langchain.chat_models import ChatFireworks\n",
"from langchain.memory import ConversationBufferMemory\n",
"from langchain.schema.runnable import RunnablePassthrough\n",
"from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
"from langchain.schema.runnable import RunnablePassthrough\n",
"\n",
"llm = ChatFireworks(\n",
"    model=\"accounts/fireworks/models/llama-v2-13b-chat\",\n",
@@ -2,14 +2,14 @@
"cells": [
{
"cell_type": "markdown",
"metadata": {
"collapsed": false
},
"source": [
"# GigaChat\n",
"This notebook shows how to use LangChain with [GigaChat](https://developers.sber.ru/portal/products/gigachat).\n",
"To use you need to install ```gigachat``` python package."
],
"metadata": {
"collapsed": false
}
]
},
{
"cell_type": "code",
@@ -24,44 +24,47 @@
},
{
"cell_type": "markdown",
"metadata": {
"collapsed": false
},
"source": [
"To get GigaChat credentials you need to [create account](https://developers.sber.ru/studio/login) and [get access to API](https://developers.sber.ru/docs/ru/gigachat/api/integration)\n",
"## Example"
],
"metadata": {
"collapsed": false
}
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"import os\n",
"from getpass import getpass\n",
"\n",
"os.environ[\"GIGACHAT_CREDENTIALS\"] = getpass()"
],
"metadata": {
"collapsed": false
}
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from langchain.chat_models import GigaChat\n",
"\n",
"chat = GigaChat(verify_ssl_certs=False)"
],
"metadata": {
"collapsed": false
}
]
},
{
"cell_type": "code",
"execution_count": 31,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
@@ -72,7 +75,7 @@
}
],
"source": [
"from langchain.schema import SystemMessage, HumanMessage\n",
"from langchain.schema import HumanMessage, SystemMessage\n",
"\n",
"messages = [\n",
"    SystemMessage(\n",
@@ -82,10 +85,7 @@
"]\n",
"\n",
"print(chat(messages).content)"
],
"metadata": {
"collapsed": false
}
]
}
],
"metadata": {
@@ -22,8 +22,8 @@
"from langchain.chat_models import JinaChat\n",
"from langchain.prompts.chat import (\n",
"    ChatPromptTemplate,\n",
"    SystemMessagePromptTemplate,\n",
"    HumanMessagePromptTemplate,\n",
"    SystemMessagePromptTemplate,\n",
")\n",
"from langchain.schema import HumanMessage, SystemMessage"
]
@@ -59,9 +59,9 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.chat_models import ChatOllama\n",
"from langchain.callbacks.manager import CallbackManager\n",
"from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n",
"from langchain.chat_models import ChatOllama\n",
"\n",
"chat_model = ChatOllama(\n",
"    model=\"llama2:7b-chat\",\n",
@@ -168,8 +168,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.vectorstores import Chroma\n",
"from langchain.embeddings import OllamaEmbeddings\n",
"from langchain.vectorstores import Chroma\n",
"\n",
"vectorstore = Chroma.from_documents(documents=all_splits, embedding=OllamaEmbeddings())"
]
@@ -224,9 +224,9 @@
"outputs": [],
"source": [
"# Chat model\n",
"from langchain.chat_models import ChatOllama\n",
"from langchain.callbacks.manager import CallbackManager\n",
"from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n",
"from langchain.chat_models import ChatOllama\n",
"\n",
"chat_model = ChatOllama(\n",
"    model=\"llama2:13b\",\n",
@@ -300,8 +300,8 @@
}
],
"source": [
"from langchain.schema import LLMResult\n",
"from langchain.callbacks.base import BaseCallbackHandler\n",
"from langchain.schema import LLMResult\n",
"\n",
"\n",
"class GenerationStatisticsCallback(BaseCallbackHandler):\n",
@@ -22,8 +22,8 @@
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts.chat import (\n",
"    ChatPromptTemplate,\n",
"    SystemMessagePromptTemplate,\n",
"    HumanMessagePromptTemplate,\n",
"    SystemMessagePromptTemplate,\n",
")\n",
"from langchain.schema import HumanMessage, SystemMessage"
]
@@ -30,8 +30,9 @@
"outputs": [],
"source": [
"import os\n",
"from langchain.chat_models.base import HumanMessage\n",
"\n",
"from langchain.chat_models import PaiEasChatEndpoint\n",
"from langchain.chat_models.base import HumanMessage\n",
"\n",
"os.environ[\"EAS_SERVICE_URL\"] = \"Your_EAS_Service_URL\"\n",
"os.environ[\"EAS_SERVICE_TOKEN\"] = \"Your_EAS_Service_Token\"\n",
@@ -51,6 +51,7 @@
"outputs": [],
"source": [
"import os\n",
"\n",
"from langchain.chat_models import PromptLayerChatOpenAI\n",
"from langchain.schema import HumanMessage"
]
@@ -24,8 +24,8 @@
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.prompts.chat import (\n",
"    ChatPromptTemplate,\n",
"    SystemMessagePromptTemplate,\n",
"    HumanMessagePromptTemplate,\n",
"    SystemMessagePromptTemplate,\n",
")\n",
"from langchain.schema import HumanMessage, SystemMessage"
]
@@ -79,8 +79,8 @@
"import re\n",
"from typing import Iterator, List\n",
"\n",
"from langchain.schema import BaseMessage, HumanMessage\n",
"from langchain.chat_loaders import base as chat_loaders\n",
"from langchain.schema import BaseMessage, HumanMessage\n",
"\n",
"logger = logging.getLogger()\n",
"\n",
@@ -215,6 +215,7 @@
"outputs": [],
"source": [
"from typing import List\n",
"\n",
"from langchain.chat_loaders.base import ChatSession\n",
"from langchain.chat_loaders.utils import (\n",
"    map_ai_messages,\n",
@@ -47,9 +47,10 @@
],
"source": [
"# This uses some example data\n",
"import requests\n",
"import zipfile\n",
"\n",
"import requests\n",
"\n",
"\n",
"def download_and_unzip(url: str, output_path: str = \"file.zip\") -> None:\n",
"    file_id = url.split(\"/\")[-2]\n",
@@ -106,8 +107,8 @@
"outputs": [],
"source": [
"from langchain.chat_loaders.facebook_messenger import (\n",
"    SingleFileFacebookMessengerChatLoader,\n",
"    FolderFacebookMessengerChatLoader,\n",
"    SingleFileFacebookMessengerChatLoader,\n",
")"
]
},
@@ -201,8 +202,8 @@
"outputs": [],
"source": [
"from langchain.chat_loaders.utils import (\n",
"    merge_chat_runs,\n",
"    map_ai_messages,\n",
"    merge_chat_runs,\n",
")"
]
},
@@ -387,8 +388,8 @@
],
"source": [
"import json\n",
"from io import BytesIO\n",
"import time\n",
"from io import BytesIO\n",
"\n",
"import openai\n",
"\n",
@@ -36,6 +36,7 @@
"outputs": [],
"source": [
"import os.path\n",
"\n",
"from google.auth.transport.requests import Request\n",
"from google.oauth2.credentials import Credentials\n",
"from google_auth_oauthlib.flow import InstalledAppFlow\n",
@@ -115,6 +115,7 @@
"outputs": [],
"source": [
"from typing import List\n",
"\n",
"from langchain.chat_loaders.base import ChatSession\n",
"from langchain.chat_loaders.utils import (\n",
"    map_ai_messages,\n",
@@ -231,8 +232,8 @@
],
"source": [
"import json\n",
"from io import BytesIO\n",
"import time\n",
"from io import BytesIO\n",
"\n",
"import openai\n",
"\n",