Add rag google vertex ai search template (#13294)

- **Description:** This is a template demonstrating how to use Google Vertex AI Search in conjunction with ChatVertexAI() to build a retrieval-augmented generation (RAG) chain.
juan-calvo-datatonic
2023-11-13 17:45:36 +01:00
committed by GitHub
parent 9024593468
commit 545b76b0fd
8 changed files with 2382 additions and 0 deletions

@@ -0,0 +1,50 @@
import os

from langchain.chat_models import ChatVertexAI
from langchain.prompts import ChatPromptTemplate
from langchain.pydantic_v1 import BaseModel
from langchain.retrievers import GoogleVertexAISearchRetriever
from langchain.schema.output_parser import StrOutputParser
from langchain.schema.runnable import RunnableParallel, RunnablePassthrough

# Get the Google Cloud project, data store, and model type from env
project_id = os.environ.get("GOOGLE_CLOUD_PROJECT_ID")
data_store_id = os.environ.get("DATA_STORE_ID")
model_type = os.environ.get("MODEL_TYPE")

if not data_store_id:
    raise ValueError(
        "No value provided in env variable 'DATA_STORE_ID'. "
        "A data store is required to run this application."
    )

# Set LLM
model = ChatVertexAI(model_name=model_type, temperature=0.0)

# Create Vertex AI Search retriever
retriever = GoogleVertexAISearchRetriever(
    project_id=project_id, search_engine_id=data_store_id
)

# RAG prompt
template = """Answer the question based only on the following context:
{context}
Question: {question}
"""
prompt = ChatPromptTemplate.from_template(template)

# RAG chain: retrieve context in parallel with passing the question through,
# fill the prompt, call the model, and parse the output to a string
chain = (
    RunnableParallel({"context": retriever, "question": RunnablePassthrough()})
    | prompt
    | model
    | StrOutputParser()
)


# Add typing for input
class Question(BaseModel):
    __root__: str


chain = chain.with_types(input_type=Question)
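
A minimal sketch of exercising the chain directly is shown below. It assumes the GOOGLE_CLOUD_PROJECT_ID, DATA_STORE_ID, and MODEL_TYPE environment variables are set and that Google Cloud credentials are available in the environment; the question text is purely illustrative.

# Minimal usage sketch (assumes the env vars above are set and the caller
# is authenticated against Google Cloud); the question text is illustrative only.
if __name__ == "__main__":
    print(chain.invoke("What does the indexed data store say about pricing?"))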