chat: fix regression in regenerate from #2929 (#3011)

Signed-off-by: Jared Van Bortel <jared@nomic.ai>
Author: Jared Van Bortel
Date: 2024-09-30 19:42:10 -04:00
Committed by: GitHub
Parent: 62bc84366b
Commit: 3025f9deff
2 changed files with 5 additions and 2 deletions

@@ -238,8 +238,9 @@ void Chat::newPromptResponsePair(const QString &prompt)
 {
     resetResponseState();
     m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
+    // the prompt is passed as the prompt item's value and the response item's prompt
     m_chatModel->appendPrompt("Prompt: ", prompt);
-    m_chatModel->appendResponse("Response: ", QString());
+    m_chatModel->appendResponse("Response: ", prompt);
     emit resetResponseRequested();
 }
@@ -248,8 +249,9 @@ void Chat::serverNewPromptResponsePair(const QString &prompt)
 {
     resetResponseState();
     m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
+    // the prompt is passed as the prompt item's value and the response item's prompt
     m_chatModel->appendPrompt("Prompt: ", prompt);
-    m_chatModel->appendResponse("Response: ", QString());
+    m_chatModel->appendResponse("Response: ", prompt);
 }
 
 bool Chat::restoringFromText() const
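
For context on why this one-argument change matters, here is a minimal, self-contained C++ sketch of the data flow the added comment describes. It assumes a simplified ChatItem/ChatModel (the real GPT4All classes use Qt types and a different interface): each response item stores the prompt that produced it, and regenerate re-reads that stored prompt, so appending the response with an empty prompt is what broke regeneration.

// Minimal standalone sketch, NOT the actual GPT4All ChatModel API.
// It only illustrates why the response item must carry the prompt:
// "regenerate" reads the prompt back from the response item it replaces.
#include <cassert>
#include <string>
#include <vector>

struct ChatItem {
    std::string name;    // e.g. "Prompt: " or "Response: "
    std::string value;   // the visible text of the item
    std::string prompt;  // for response items: the prompt that produced this response
};

class ChatModel {
public:
    // The prompt item displays the prompt text directly.
    void appendPrompt(const std::string &name, const std::string &prompt) {
        m_items.push_back({name, prompt, {}});
    }
    // The response starts out empty; storing the prompt on it is what lets
    // a later regenerate re-run the same request.
    void appendResponse(const std::string &name, const std::string &prompt) {
        m_items.push_back({name, {}, prompt});
    }
    // Regenerate the last response by re-submitting the prompt stored on it.
    std::string regenerateLast() const {
        const ChatItem &response = m_items.back();
        return "echo of: " + response.prompt; // stand-in for the real model call
    }
private:
    std::vector<ChatItem> m_items;
};

int main() {
    ChatModel model;
    const std::string prompt = "What is the capital of France?";
    model.appendPrompt("Prompt: ", prompt);

    // Regression: passing an empty string here leaves regenerate with no prompt.
    // model.appendResponse("Response: ", std::string());

    // Fix: pass the prompt as the response item's prompt as well.
    model.appendResponse("Response: ", prompt);

    assert(model.regenerateLast() == "echo of: " + prompt);
    return 0;
}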