chatmodel: remove the 'prompt' field from ChatItem (#3016)

Signed-off-by: Adam Treat <treat.adam@gmail.com>
This commit is contained in:
AT 2024-10-01 13:57:19 -04:00 committed by GitHub
parent 88b95950c5
commit ee67cca885
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 24 additions and 25 deletions

View File

@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
### Changed
- Rebase llama.cpp on latest upstream as of September 26th ([#2998](https://github.com/nomic-ai/gpt4all/pull/2998))
- Change the error message when a message is too long ([#3004](https://github.com/nomic-ai/gpt4all/pull/3004))
- Simplify chatmodel to get rid of unnecessary field and bump chat version ([#3016](https://github.com/nomic-ai/gpt4all/pull/3016))
### Fixed
- Fix a crash when attempting to continue a chat loaded from disk ([#2995](https://github.com/nomic-ai/gpt4all/pull/2995))

View File

@ -1694,19 +1694,21 @@ Rectangle {
imageHeight: 20
visible: chatModel.count && !currentChat.isServer && currentChat.isModelLoaded && !currentChat.responseInProgress
onClicked: {
var index = Math.max(0, chatModel.count - 1);
var listElement = chatModel.get(index);
if (chatModel.count < 2)
return
var promptIndex = chatModel.count - 2
var promptElement = chatModel.get(promptIndex)
var responseIndex = chatModel.count - 1
var responseElement = chatModel.get(responseIndex)
if (promptElement.name !== "Prompt: " || responseElement.name !== "Response: ")
return
currentChat.regenerateResponse()
if (chatModel.count) {
if (listElement.name === "Response: ") {
chatModel.updateCurrentResponse(index, true);
chatModel.updateStopped(index, false);
chatModel.updateThumbsUpState(index, false);
chatModel.updateThumbsDownState(index, false);
chatModel.updateNewResponse(index, "");
currentChat.prompt(listElement.prompt)
}
}
chatModel.updateCurrentResponse(responseIndex, true)
chatModel.updateStopped(responseIndex, false)
chatModel.updateThumbsUpState(responseIndex, false)
chatModel.updateThumbsDownState(responseIndex, false)
chatModel.updateNewResponse(responseIndex, "")
currentChat.prompt(promptElement.value)
}
ToolTip.visible: regenerateButton.hovered
ToolTip.text: qsTr("Redo last chat response")

View File

@ -240,7 +240,7 @@ void Chat::newPromptResponsePair(const QString &prompt)
m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
// the prompt is stored as the prompt item's value; the response item no longer keeps its own copy
m_chatModel->appendPrompt("Prompt: ", prompt);
m_chatModel->appendResponse("Response: ", prompt);
m_chatModel->appendResponse("Response: ");
emit resetResponseRequested();
}
@ -251,7 +251,7 @@ void Chat::serverNewPromptResponsePair(const QString &prompt)
m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
// the prompt is stored as the prompt item's value; the response item no longer keeps its own copy
m_chatModel->appendPrompt("Prompt: ", prompt);
m_chatModel->appendResponse("Response: ", prompt);
m_chatModel->appendResponse("Response: ");
}
bool Chat::restoringFromText() const

View File

@ -19,7 +19,7 @@
#include <algorithm>
#define CHAT_FORMAT_MAGIC 0xF5D553CC
#define CHAT_FORMAT_VERSION 9
#define CHAT_FORMAT_VERSION 10
class MyChatListModel: public ChatListModel { };
Q_GLOBAL_STATIC(MyChatListModel, chatListModelInstance)

View File

@ -22,7 +22,6 @@ struct ChatItem
Q_PROPERTY(int id MEMBER id)
Q_PROPERTY(QString name MEMBER name)
Q_PROPERTY(QString value MEMBER value)
Q_PROPERTY(QString prompt MEMBER prompt)
Q_PROPERTY(QString newResponse MEMBER newResponse)
Q_PROPERTY(bool currentResponse MEMBER currentResponse)
Q_PROPERTY(bool stopped MEMBER stopped)
@ -36,7 +35,6 @@ public:
int id = 0;
QString name;
QString value;
QString prompt;
QString newResponse;
QList<ResultInfo> sources;
QList<ResultInfo> consolidatedSources;
@ -59,7 +57,6 @@ public:
IdRole = Qt::UserRole + 1,
NameRole,
ValueRole,
PromptRole,
NewResponseRole,
CurrentResponseRole,
StoppedRole,
@ -88,8 +85,6 @@ public:
return item.name;
case ValueRole:
return item.value;
case PromptRole:
return item.prompt;
case NewResponseRole:
return item.newResponse;
case CurrentResponseRole:
@ -115,7 +110,6 @@ public:
roles[IdRole] = "id";
roles[NameRole] = "name";
roles[ValueRole] = "value";
roles[PromptRole] = "prompt";
roles[NewResponseRole] = "newResponse";
roles[CurrentResponseRole] = "currentResponse";
roles[StoppedRole] = "stopped";
@ -137,12 +131,11 @@ public:
emit countChanged();
}
void appendResponse(const QString &name, const QString &prompt)
void appendResponse(const QString &name)
{
ChatItem item;
item.id = m_chatItems.count(); // This is only relevant for responses
item.name = name;
item.prompt = prompt;
item.currentResponse = true;
beginInsertRows(QModelIndex(), m_chatItems.size(), m_chatItems.size());
m_chatItems.append(item);
@ -266,7 +259,6 @@ public:
stream << c.id;
stream << c.name;
stream << c.value;
stream << c.prompt;
stream << c.newResponse;
stream << c.currentResponse;
stream << c.stopped;
@ -336,7 +328,11 @@ public:
stream >> c.id;
stream >> c.name;
stream >> c.value;
stream >> c.prompt;
if (version < 10) {
// Chat format versions < 10 serialized a per-item 'prompt' string; read and discard it for backward compatibility
QString prompt;
stream >> prompt;
}
stream >> c.newResponse;
stream >> c.currentResponse;
stream >> c.stopped;