Allow index name customization via env var in rag-conversation (#12315)
commit 28c39503eb (parent 869a49a0ab)
The README change documents the new requirements:

```diff
@@ -1,4 +1,4 @@
 # Conversational RAG
 
 This template performs [conversational](https://python.langchain.com/docs/expression_language/cookbook/retrieval#conversational-retrieval-chain) [retrieval](https://python.langchain.com/docs/use_cases/question_answering/), which is one of the most popular LLM use-cases.
 
@@ -10,4 +10,4 @@ Be sure that `OPENAI_API_KEY` is set in order to use the OpenAI models.
 
 ## Pinecone
 
-Be sure that `PINECONE_API_KEY` is set in order to use Pinecone.
+This template uses Pinecone as a vectorstore and requires that `PINECONE_API_KEY`, `PINECONE_ENVIRONMENT`, and `PINECONE_INDEX` are set.
```
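Per the updated README, all three variables must be set before the template starts. A minimal sketch of doing so from Python (the values are placeholders, not part of the commit):

```python
import os

# Must be set before the chain module is imported, since the module
# validates PINECONE_API_KEY / PINECONE_ENVIRONMENT at import time.
os.environ["PINECONE_API_KEY"] = "your-pinecone-api-key"  # placeholder
os.environ["PINECONE_ENVIRONMENT"] = "us-east-1-aws"      # placeholder region
os.environ["PINECONE_INDEX"] = "my-rag-index"             # custom index name
```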
In the template's chain module, `os` is imported to read the environment:

```diff
@@ -1,3 +1,4 @@
+import os
 from typing import Tuple, List
 from pydantic import BaseModel
 from operator import itemgetter
```
The module now fails fast on missing credentials and reads the index name from `PINECONE_INDEX`, defaulting to `langchain-test`:

```diff
@@ -10,6 +11,14 @@ from langchain.prompts.prompt import PromptTemplate
 from langchain.schema.output_parser import StrOutputParser
 from langchain.schema.runnable import RunnablePassthrough, RunnableBranch, RunnableLambda, RunnableMap
 
+if os.environ.get("PINECONE_API_KEY", None) is None:
+    raise Exception("Missing `PINECONE_API_KEY` environment variable.")
+
+if os.environ.get("PINECONE_ENVIRONMENT", None) is None:
+    raise Exception("Missing `PINECONE_ENVIRONMENT` environment variable.")
+
+PINECONE_INDEX_NAME = os.environ.get("PINECONE_INDEX", "langchain-test")
+
 ### Ingest code - you may need to run this the first time
 # Load
 # from langchain.document_loaders import WebBaseLoader
```
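The two explicit checks could equally be folded into a small helper; the sketch below is an illustrative refactor, not code from the commit (`_require_env` is a hypothetical name):

```python
import os

def _require_env(name: str) -> str:
    """Return the value of an environment variable, failing fast if unset."""
    value = os.environ.get(name)
    if value is None:
        raise Exception(f"Missing `{name}` environment variable.")
    return value

_require_env("PINECONE_API_KEY")
_require_env("PINECONE_ENVIRONMENT")
PINECONE_INDEX_NAME = os.environ.get("PINECONE_INDEX", "langchain-test")
```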
Both the commented ingest snippet and the live retriever now use the configurable name:

```diff
@@ -20,14 +29,14 @@ from langchain.schema.runnable import RunnablePassthrough, RunnableBranch, RunnableLambda, RunnableMap
 # from langchain.text_splitter import RecursiveCharacterTextSplitter
 # text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
 # all_splits = text_splitter.split_documents(data)
 #
 # # Add to vectorDB
 # vectorstore = Pinecone.from_documents(
-#     documents=all_splits, embedding=OpenAIEmbeddings(), index_name='langchain-test'
+#     documents=all_splits, embedding=OpenAIEmbeddings(), index_name=PINECONE_INDEX_NAME
 # )
 # retriever = vectorstore.as_retriever()
 
-vectorstore = Pinecone.from_existing_index("langchain-test", OpenAIEmbeddings())
+vectorstore = Pinecone.from_existing_index(PINECONE_INDEX_NAME, OpenAIEmbeddings())
 retriever = vectorstore.as_retriever()
 
 # Condense a chat history and follow-up question into a standalone question
```
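Uncommented and pointed at the new variable, the one-time ingest path would look roughly like the sketch below; the source URL is illustrative, and the imports reflect the langchain 0.0.x layout this template targets:

```python
import os

from langchain.document_loaders import WebBaseLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Pinecone

PINECONE_INDEX_NAME = os.environ.get("PINECONE_INDEX", "langchain-test")

# Load: any loader works; the URL here is a stand-in for your own docs.
data = WebBaseLoader("https://example.com/docs").load()

# Split into small chunks for embedding, as in the commented snippet.
text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
all_splits = text_splitter.split_documents(data)

# Embed and upsert into whichever index PINECONE_INDEX names.
vectorstore = Pinecone.from_documents(
    documents=all_splits, embedding=OpenAIEmbeddings(), index_name=PINECONE_INDEX_NAME
)
retriever = vectorstore.as_retriever()
```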
The surrounding chat-history plumbing, shown for context:

```diff
@@ -62,9 +71,9 @@ def _format_chat_history(chat_history: List[Tuple[str, str]]) -> List:
         buffer.append(AIMessage(content=ai))
     return buffer
 
 # User input
 class ChatHistory(BaseModel):
     chat_history: List[Tuple[str, str]]
     question: str
 
 
```
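Since `ChatHistory` types the chain's input, invoking the template takes a question plus (human, ai) message tuples. A usage sketch, assuming the module exposes its final runnable as `chain` (an assumption about the template's layout, not shown in this diff):

```python
# `chain` is assumed to be the runnable assembled at the bottom of the
# module above; the question and history values are illustrative.
result = chain.invoke(
    {
        "question": "Which vectorstore does the template use?",
        "chat_history": [
            ("What does this template do?", "It performs conversational RAG."),
        ],
    }
)
print(result)
```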