Format Templates (#12396)

Author: Erick Friis
Date: 2023-10-26 19:44:30 -07:00
Committed by: GitHub
Commit: 4b16601d33 (parent 25c98dbba9)
59 changed files with 800 additions and 441 deletions


@@ -1,13 +1,15 @@
-from langchain.chat_models import ChatOpenAI
-from langchain.schema.output_parser import StrOutputParser
-from langchain.schema.runnable import RunnablePassthrough, RunnableMap
-from langchain.embeddings import HuggingFaceEmbeddings
-from langchain.vectorstores.elasticsearch import ElasticsearchStore
-from langchain.schema import format_document
-from typing import Tuple, List
-from operator import itemgetter
-from .prompts import CONDENSE_QUESTION_PROMPT, LLM_CONTEXT_PROMPT, DOCUMENT_PROMPT
+from typing import List, Tuple
+from langchain.chat_models import ChatOpenAI
+from langchain.embeddings import HuggingFaceEmbeddings
+from langchain.schema import format_document
+from langchain.schema.output_parser import StrOutputParser
+from langchain.schema.runnable import RunnableMap, RunnablePassthrough
+from langchain.vectorstores.elasticsearch import ElasticsearchStore
+from .connection import es_connection_details
+from .prompts import CONDENSE_QUESTION_PROMPT, DOCUMENT_PROMPT, LLM_CONTEXT_PROMPT
 # Setup connecting to Elasticsearch
 vectorstore = ElasticsearchStore(
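
The change in this first file is purely mechanical: the import block is re-sorted into the conventional standard-library / third-party / local-package order that an isort-style formatter produces, and the names inside each statement are alphabetized (List before Tuple, RunnableMap before RunnablePassthrough, DOCUMENT_PROMPT before LLM_CONTEXT_PROMPT). A sketch of how the formatted header plausibly reads is below; the group comments and blank lines are added here for illustration (blank lines do not survive the extracted diff), and the ElasticsearchStore(...) arguments are hypothetical stand-ins, since the hunk ends mid-call. The remaining hunks come from the package's prompts module (the .prompts import above), where the only change is a pair of lint suppressions.

# Standard library
from operator import itemgetter  # present in the pre-format block; assumed still used
from typing import List, Tuple

# Third-party
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.schema import format_document
from langchain.schema.output_parser import StrOutputParser
from langchain.schema.runnable import RunnableMap, RunnablePassthrough
from langchain.vectorstores.elasticsearch import ElasticsearchStore

# Local package
from .connection import es_connection_details
from .prompts import CONDENSE_QUESTION_PROMPT, DOCUMENT_PROMPT, LLM_CONTEXT_PROMPT

# Setup connecting to Elasticsearch
vectorstore = ElasticsearchStore(
    **es_connection_details,  # assumed: a dict of connection kwargs (URL or cloud id, credentials)
    embedding=HuggingFaceEmbeddings(),  # hypothetical embedding choice, not shown in the hunk
    index_name="workplace-search",  # hypothetical index name, not shown in the hunk
)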


@@ -6,7 +6,7 @@ condense_question_prompt_template = """Given the following conversation and a fo
 Chat History:
 {chat_history}
 Follow Up Input: {question}
-"""
+""" # noqa: E501
 CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(
     condense_question_prompt_template
 )
@@ -23,7 +23,7 @@ If you don't know the answer, just say that you don't know, don't try to make up
 {context}
 ----
 Question: {question}
-"""
+""" # noqa: E501
 LLM_CONTEXT_PROMPT = ChatPromptTemplate.from_template(llm_context_prompt_template)
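
In the prompts module the only edit is appending # noqa: E501 after the closing triple quotes of the two long template strings: at least one physical line of each template runs past the configured line-length limit, and the suppression (ruff, for instance, applies a noqa on the closing line of a multi-line string to the entire string) lets the prompt text stay unwrapped. Below is a self-contained sketch of the pattern and of how such templates are used downstream; the template wording and the example inputs are illustrative, not the repository's actual text.

from langchain.prompts import ChatPromptTemplate, PromptTemplate

# Illustrative stand-ins for the real templates in prompts.py. The opening line
# of each string is deliberately long; "# noqa: E501" on the closing quotes
# suppresses the line-length rule for the whole multi-line string instead of
# forcing the prompt text to be rewrapped.
condense_question_prompt_template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.

Chat History:
{chat_history}
Follow Up Input: {question}
"""  # noqa: E501

CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(
    condense_question_prompt_template
)

llm_context_prompt_template = """Use the following context to answer the question at the end. If the context does not contain the answer, just say that you don't know.

----
{context}
----
Question: {question}
"""  # noqa: E501

LLM_CONTEXT_PROMPT = ChatPromptTemplate.from_template(llm_context_prompt_template)

# PromptTemplate.format() returns a plain string; ChatPromptTemplate.format_messages()
# returns a list of chat messages ready to send to a chat model.
standalone_question = CONDENSE_QUESTION_PROMPT.format(
    chat_history="Human: What is the PTO policy?\nAI: Twenty days per year.",
    question="How do I request it?",
)
answer_messages = LLM_CONTEXT_PROMPT.format_messages(
    context="PTO requests are submitted through the HR portal.",
    question="How do I request PTO?",
)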