fix #3117
Commit 29270e0378 by Bagatur, 2023-08-31 07:29:49 -07:00, committed by GitHub
2 changed files with 31 additions and 44 deletions

View File

@@ -20,10 +20,8 @@ DEFAULT_REFINE_PROMPT_TMPL = (
     "answer the question. "
     "If the context isn't useful, return the original answer."
 )
-DEFAULT_REFINE_PROMPT = PromptTemplate(
-    input_variables=["question", "existing_answer", "context_str"],
-    template=DEFAULT_REFINE_PROMPT_TMPL,
-)
+DEFAULT_REFINE_PROMPT = PromptTemplate.from_template(DEFAULT_REFINE_PROMPT_TMPL)
 refine_template = (
     "We have the opportunity to refine the existing answer "
     "(only if needed) with some more context below.\n"
@@ -34,12 +32,9 @@ refine_template = (
     "answer the question. "
     "If the context isn't useful, return the original answer."
 )
-messages = [
-    HumanMessagePromptTemplate.from_template("{question}"),
-    AIMessagePromptTemplate.from_template("{existing_answer}"),
-    HumanMessagePromptTemplate.from_template(refine_template),
-]
-CHAT_REFINE_PROMPT = ChatPromptTemplate.from_messages(messages)
+CHAT_REFINE_PROMPT = ChatPromptTemplate.from_messages(
+    [("human", "{question}"), ("ai", "{existing_answer}"), ("human", refine_template)]
+)
 REFINE_PROMPT_SELECTOR = ConditionalPromptSelector(
     default_prompt=DEFAULT_REFINE_PROMPT,
     conditionals=[(is_chat_model, CHAT_REFINE_PROMPT)],
@@ -48,28 +43,25 @@ REFINE_PROMPT_SELECTOR = ConditionalPromptSelector(
 DEFAULT_TEXT_QA_PROMPT_TMPL = (
     "Context information is below. \n"
-    "---------------------\n"
-    "{context_str}"
-    "\n---------------------\n"
+    "------------\n"
+    "{context_str}\n"
+    "------------\n"
     "Given the context information and not prior knowledge, "
     "answer the question: {question}\n"
 )
-DEFAULT_TEXT_QA_PROMPT = PromptTemplate(
-    input_variables=["context_str", "question"], template=DEFAULT_TEXT_QA_PROMPT_TMPL
-)
+DEFAULT_TEXT_QA_PROMPT = PromptTemplate.from_template(DEFAULT_TEXT_QA_PROMPT_TMPL)
 chat_qa_prompt_template = (
     "Context information is below.\n"
-    "---------------------\n"
-    "{context_str}"
-    "\n---------------------\n"
+    "------------\n"
+    "{context_str}\n"
+    "------------\n"
     "Given the context information and not prior knowledge, "
     "answer any questions"
 )
-messages = [
-    SystemMessagePromptTemplate.from_template(chat_qa_prompt_template),
-    HumanMessagePromptTemplate.from_template("{question}"),
-]
-CHAT_QUESTION_PROMPT = ChatPromptTemplate.from_messages(messages)
+CHAT_QUESTION_PROMPT = ChatPromptTemplate.from_messages(
+    [("system", chat_qa_prompt_template), ("human", "{question}")]
+)
 QUESTION_PROMPT_SELECTOR = ConditionalPromptSelector(
     default_prompt=DEFAULT_TEXT_QA_PROMPT,
     conditionals=[(is_chat_model, CHAT_QUESTION_PROMPT)],
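
Note for readers: the new style in this file leans on two conveniences, sketched below in a minimal, self-contained form: `PromptTemplate.from_template` infers its input variables from the template string, and `ChatPromptTemplate.from_messages` accepts plain `(role, template)` tuples. The prompt strings and variable names in this sketch are illustrative, not taken from the diff above.

```python
from langchain.chains.prompt_selector import ConditionalPromptSelector, is_chat_model
from langchain.prompts import ChatPromptTemplate, PromptTemplate

# from_template infers input_variables ({question}) from the template string,
# so the explicit input_variables list in the old code is no longer needed.
default_prompt = PromptTemplate.from_template("Answer the question: {question}")

# from_messages accepts (role, template) tuples in place of the
# Human/AI/SystemMessagePromptTemplate objects the old code built by hand.
chat_prompt = ChatPromptTemplate.from_messages(
    [("system", "You answer questions from provided context."), ("human", "{question}")]
)

selector = ConditionalPromptSelector(
    default_prompt=default_prompt,
    conditionals=[(is_chat_model, chat_prompt)],
)

# get_prompt returns chat_prompt for chat models and default_prompt otherwise:
# prompt = selector.get_prompt(llm)
```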

View File

@@ -1,21 +1,16 @@
-# flake8: noqa
 from langchain.prompts import PromptTemplate
-REFINE_PROMPT_TMPL = (
-    "Your job is to produce a final summary\n"
-    "We have provided an existing summary up to a certain point: {existing_answer}\n"
-    "We have the opportunity to refine the existing summary"
-    "(only if needed) with some more context below.\n"
-    "------------\n"
-    "{text}\n"
-    "------------\n"
-    "Given the new context, refine the original summary\n"
-    "If the context isn't useful, return the original summary."
-)
-REFINE_PROMPT = PromptTemplate(
-    input_variables=["existing_answer", "text"],
-    template=REFINE_PROMPT_TMPL,
-)
+REFINE_PROMPT_TMPL = """\
+Your job is to produce a final summary.
+We have provided an existing summary up to a certain point: {existing_answer}
+We have the opportunity to refine the existing summary (only if needed) with some more context below.
+------------
+{text}
+------------
+Given the new context, refine the original summary.
+If the context isn't useful, return the original summary.\
+"""  # noqa: E501
+REFINE_PROMPT = PromptTemplate.from_template(REFINE_PROMPT_TMPL)
 prompt_template = """Write a concise summary of the following:
@@ -25,4 +20,4 @@ prompt_template = """Write a concise summary of the following:
 CONCISE SUMMARY:"""
-PROMPT = PromptTemplate(template=prompt_template, input_variables=["text"])
+PROMPT = PromptTemplate.from_template(prompt_template)
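
The same `from_template` migration applies here. A quick sketch of why dropping the explicit `input_variables` argument is safe: the variables are parsed out of the `{placeholders}` in the template text. The example string below is illustrative.

```python
from langchain.prompts import PromptTemplate

# from_template parses the {placeholders} out of the template, which is
# why the explicit input_variables=["text"] argument can be dropped.
prompt = PromptTemplate.from_template("Write a concise summary of the following:\n{text}")
assert prompt.input_variables == ["text"]
print(prompt.format(text="Some document to summarize."))
```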