mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-12 12:59:07 +00:00)
templates/neo4j-advanced-rag/neo4j_advanced_rag/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from neo4j_advanced_rag.chain import chain

__all__ = ["chain"]
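The package `__init__.py` simply re-exports `chain`, which is what a LangServe app mounts. A minimal sketch of how such a template is typically wired into a FastAPI server (the server layout and route path here are illustrative, not part of this diff):

from fastapi import FastAPI
from langserve import add_routes

from neo4j_advanced_rag import chain as neo4j_advanced_chain

app = FastAPI()

# Expose the template's chain over HTTP; the path is illustrative.
add_routes(app, neo4j_advanced_chain, path="/neo4j-advanced-rag")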
templates/neo4j-advanced-rag/neo4j_advanced_rag/chain.py (new file, 51 lines)
@@ -0,0 +1,51 @@
from operator import itemgetter

from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.pydantic_v1 import BaseModel
from langchain.schema.output_parser import StrOutputParser
from langchain.schema.runnable import ConfigurableField, RunnableParallel

from neo4j_advanced_rag.retrievers import (
    hypothetic_question_vectorstore,
    parent_vectorstore,
    summary_vectorstore,
    typical_rag,
)

template = """Answer the question based only on the following context:
{context}

Question: {question}
"""
prompt = ChatPromptTemplate.from_template(template)

model = ChatOpenAI()

retriever = typical_rag.as_retriever().configurable_alternatives(
    ConfigurableField(id="strategy"),
    default_key="typical_rag",
    parent_strategy=parent_vectorstore.as_retriever(),
    hypothetical_questions=hypothetic_question_vectorstore.as_retriever(),
    summary_strategy=summary_vectorstore.as_retriever(),
)

chain = (
    RunnableParallel(
        {
            "context": itemgetter("question") | retriever,
            "question": itemgetter("question"),
        }
    )
    | prompt
    | model
    | StrOutputParser()
)


# Add typing for input
class Question(BaseModel):
    question: str


chain = chain.with_types(input_type=Question)
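Because the retriever is built with `configurable_alternatives`, the retrieval strategy can be swapped per call through the `strategy` field. A short usage sketch (the question text is illustrative; it assumes the Neo4j indexes and OpenAI credentials the template expects are already configured):

from neo4j_advanced_rag.chain import chain

# Default strategy ("typical_rag")
chain.invoke({"question": "What does the text say about pricing?"})

# Switch strategies at invocation time via the configurable "strategy" field,
# e.g. "parent_strategy", "hypothetical_questions", or "summary_strategy".
chain.invoke(
    {"question": "What does the text say about pricing?"},
    config={"configurable": {"strategy": "parent_strategy"}},
)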
templates/neo4j-advanced-rag/neo4j_advanced_rag/retrievers.py (new file, 49 lines)
@@ -0,0 +1,49 @@
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Neo4jVector

# Typical RAG retriever

typical_rag = Neo4jVector.from_existing_index(
    OpenAIEmbeddings(), index_name="typical_rag"
)

# Parent retriever

parent_query = """
MATCH (node)<-[:HAS_CHILD]-(parent)
WITH parent, max(score) AS score // deduplicate parents
RETURN parent.text AS text, score, {} AS metadata LIMIT 1
"""

parent_vectorstore = Neo4jVector.from_existing_index(
    OpenAIEmbeddings(),
    index_name="parent_document",
    retrieval_query=parent_query,
)

# Hypothetical questions retriever

hypothetic_question_query = """
MATCH (node)<-[:HAS_QUESTION]-(parent)
WITH parent, max(score) AS score // deduplicate parents
RETURN parent.text AS text, score, {} AS metadata
"""

hypothetic_question_vectorstore = Neo4jVector.from_existing_index(
    OpenAIEmbeddings(),
    index_name="hypothetical_questions",
    retrieval_query=hypothetic_question_query,
)
# Summary retriever

summary_query = """
MATCH (node)<-[:HAS_SUMMARY]-(parent)
WITH parent, max(score) AS score // deduplicate parents
RETURN parent.text AS text, score, {} AS metadata
"""

summary_vectorstore = Neo4jVector.from_existing_index(
    OpenAIEmbeddings(),
    index_name="summary",
    retrieval_query=summary_query,
)
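Each `retrieval_query` runs after the vector similarity search: `node` is the matched embedded node and `score` its similarity, and the Cypher walks back to the parent document so the chain receives the full parent text rather than the small indexed unit. A minimal sketch of exercising one retriever directly, assuming the integration picks up the usual NEO4J_URI / NEO4J_USERNAME / NEO4J_PASSWORD and OPENAI_API_KEY environment variables (the values below are placeholders):

import os

# Placeholder connection details; replace with your own deployment's values.
os.environ["NEO4J_URI"] = "bolt://localhost:7687"
os.environ["NEO4J_USERNAME"] = "neo4j"
os.environ["NEO4J_PASSWORD"] = "password"
os.environ["OPENAI_API_KEY"] = "sk-..."

# Import after the environment is set, since the vector stores connect at import time.
from neo4j_advanced_rag.retrievers import parent_vectorstore

docs = parent_vectorstore.as_retriever().get_relevant_documents(
    "What does the text say about pricing?"
)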