Add template for Pinecone + Multi-Query (#12353)
@@ -1,84 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "681a5d1e",
-   "metadata": {},
-   "source": [
-    "## Connect to template\n",
-    "\n",
-    "`Context`\n",
-    " \n",
-    "* LangServe apps give you access to templates.\n",
-    "* Templates are LLM pipelines (runnables or chains) with endpoints accessible via FastAPI.\n",
-    "* The environment for these templates is managed by Poetry.\n",
-    "\n",
-    "`Create app`\n",
-    "\n",
-    "* Install LangServe and create an app.\n",
-    "* This will create a new Poetry environment.\n",
-    "```\n",
-    "pip install < to add > \n",
-    "langchain serve new my-app\n",
-    "cd my-app\n",
-    "```\n",
-    "\n",
-    "`Add templates`\n",
-    "\n",
-    "* When we add a template, we update the Poetry config file with the necessary dependencies.\n",
-    "* It also automatically installs these template dependencies in your Poetry environment.\n",
-    "```\n",
-    "langchain serve add rag-pinecone\n",
-    "```\n",
-    "\n",
-    "`Start FastAPI server`\n",
-    "\n",
-    "```\n",
-    "langchain start\n",
-    "```\n",
-    "\n",
-    "Note, we can now look at the endpoints:\n",
-    "\n",
-    "http://127.0.0.1:8000/docs#\n",
-    "\n",
-    "And look specifically at our loaded template:\n",
-    "\n",
-    "http://127.0.0.1:8000/docs#/default/invoke_rag_pinecone_invoke_post\n",
-    " \n",
-    "We can also use the remote runnable to call it:"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d774be2a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from langserve.client import RemoteRunnable\n",
-    "rag_app_pinecone = RemoteRunnable('http://localhost:8000/rag-pinecone')"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.9.16"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
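The deleted notebook above walks through the LangServe workflow (create an app, add the `rag-pinecone` template, start the FastAPI server) and then calls the chain through `RemoteRunnable`. As a rough HTTP-level equivalent, here is a minimal sketch that posts to the served endpoint directly; the `/rag-pinecone/invoke` path and the `{"input": ...}` / `{"output": ...}` request and response shapes follow LangServe's usual invoke schema and are assumptions, not part of this diff.

```python
# Minimal sketch: call the served chain over plain HTTP instead of RemoteRunnable.
# Assumes the LangServe app from the notebook is running on localhost:8000 and
# exposes the standard /rag-pinecone/invoke endpoint with {"input": ...} bodies.
import requests

response = requests.post(
    "http://localhost:8000/rag-pinecone/invoke",
    json={"input": "How does agent memory work?"},
)
response.raise_for_status()
print(response.json()["output"])  # the chain's answer
```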
templates/rag-pinecone/rag_pinecone.ipynb (new file, 45 lines)
@@ -0,0 +1,45 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "681a5d1e",
+   "metadata": {},
+   "source": [
+    "## Connect to template"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d774be2a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langserve.client import RemoteRunnable\n",
+    "rag_app_pinecone = RemoteRunnable('http://localhost:8000/rag-pinecone')\n",
+    "rag_app_pinecone.invoke(\"How does agent memory work?\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.16"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
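The new notebook only shows a single synchronous `invoke`. Because `RemoteRunnable` implements the standard Runnable interface, the same remote chain can also be batched or streamed; a small sketch, assuming the LangServe server from the notebook is running locally:

```python
# Sketch: other Runnable methods on the same remote chain (assumes the LangServe
# server is running on localhost:8000 with the rag-pinecone template mounted).
from langserve.client import RemoteRunnable

rag_app_pinecone = RemoteRunnable("http://localhost:8000/rag-pinecone")

# Answer several questions in one round trip
answers = rag_app_pinecone.batch(
    ["How does agent memory work?", "What is task decomposition?"]
)

# Stream the answer incrementally (chunks are strings when the chain ends in StrOutputParser)
for chunk in rag_app_pinecone.stream("How does agent memory work?"):
    print(chunk, end="", flush=True)
```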
@@ -8,25 +8,32 @@ from langchain.embeddings import OpenAIEmbeddings
 from langchain.schema.output_parser import StrOutputParser
 from langchain.schema.runnable import RunnablePassthrough, RunnableParallel
 
-# Pinecone init
-# Find API key in console at app.pinecone.io
-YOUR_API_KEY = os.getenv('PINECONE_API_KEY') or 'PINECONE_API_KEY'
-# Find ENV (cloud region) next to API key in console
-YOUR_ENV = os.getenv('PINECONE_ENVIRONMENT') or 'PINECONE_ENV'
-# Init
-pinecone.init(
-    api_key=YOUR_API_KEY,
-    environment=YOUR_ENV
-)
+if os.environ.get("PINECONE_API_KEY", None) is None:
+    raise Exception("Missing `PINECONE_API_KEY` environment variable.")
 
-# Get vectorstore
-text_field = "text"
-index_name = "langchain-multi-query-demo"
-index = pinecone.Index(index_name)
-vectorstore = Pinecone(index,
-                       OpenAIEmbeddings(),
-                       text_field)
+if os.environ.get("PINECONE_ENVIRONMENT", None) is None:
+    raise Exception("Missing `PINECONE_ENVIRONMENT` environment variable.")
+
+PINECONE_INDEX_NAME = os.environ.get("PINECONE_INDEX", "langchain-test")
+
+### Ingest code - you may need to run this the first time
+# Load
+# from langchain.document_loaders import WebBaseLoader
+# loader = WebBaseLoader("https://lilianweng.github.io/posts/2023-06-23-agent/")
+# data = loader.load()
+
+# # Split
+# from langchain.text_splitter import RecursiveCharacterTextSplitter
+# text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
+# all_splits = text_splitter.split_documents(data)
+
+# # Add to vectorDB
+# vectorstore = Pinecone.from_documents(
+#     documents=all_splits, embedding=OpenAIEmbeddings(), index_name=PINECONE_INDEX_NAME
+# )
+# retriever = vectorstore.as_retriever()
+
+vectorstore = Pinecone.from_existing_index(PINECONE_INDEX_NAME, OpenAIEmbeddings())
+retriever = vectorstore.as_retriever()
+
 # RAG prompt
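The hunk stops at the `# RAG prompt` comment, so the rest of this chain module is not visible here. For orientation, below is a hedged sketch of how such a multi-query RAG chain is typically assembled from the pieces the hunk does define (`retriever`, plus the `StrOutputParser` / `RunnableParallel` / `RunnablePassthrough` imports); the `MultiQueryRetriever`, prompt wording, and `ChatOpenAI` model are assumptions consistent with the commit title, not code from this diff.

```python
# Sketch of a possible continuation past "# RAG prompt"; `retriever` is the
# Pinecone retriever defined above. MultiQueryRetriever, the prompt text, and
# ChatOpenAI are assumptions, not taken from this commit.
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.retrievers.multi_query import MultiQueryRetriever
from langchain.schema.output_parser import StrOutputParser
from langchain.schema.runnable import RunnableParallel, RunnablePassthrough

# Wrap the Pinecone retriever so an LLM generates several query variants per question
llm = ChatOpenAI(temperature=0)
multi_query_retriever = MultiQueryRetriever.from_llm(retriever=retriever, llm=llm)

# RAG prompt
template = """Answer the question based only on the following context:
{context}

Question: {question}
"""
prompt = ChatPromptTemplate.from_template(template)

# Retrieve with multi-query expansion, then answer with the LLM
chain = (
    RunnableParallel({"context": multi_query_retriever, "question": RunnablePassthrough()})
    | prompt
    | llm
    | StrOutputParser()
)
```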