chatmodel: remove the 'prompt' field from ChatItem (#3016)
Signed-off-by: Adam Treat <treat.adam@gmail.com>
commit ee67cca885
parent 88b95950c5
@@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
 ### Changed
 - Rebase llama.cpp on latest upstream as of September 26th ([#2998](https://github.com/nomic-ai/gpt4all/pull/2998))
 - Change the error message when a message is too long ([#3004](https://github.com/nomic-ai/gpt4all/pull/3004))
+- Simplify chatmodel to get rid of unnecessary field and bump chat version ([#3016](https://github.com/nomic-ai/gpt4all/pull/3016))
 
 ### Fixed
 - Fix a crash when attempting to continue a chat loaded from disk ([#2995](https://github.com/nomic-ai/gpt4all/pull/2995))
@@ -1694,19 +1694,21 @@ Rectangle {
         imageHeight: 20
         visible: chatModel.count && !currentChat.isServer && currentChat.isModelLoaded && !currentChat.responseInProgress
         onClicked: {
-            var index = Math.max(0, chatModel.count - 1);
-            var listElement = chatModel.get(index);
+            if (chatModel.count < 2)
+                return
+            var promptIndex = chatModel.count - 2
+            var promptElement = chatModel.get(promptIndex)
+            var responseIndex = chatModel.count - 1
+            var responseElement = chatModel.get(responseIndex)
+            if (promptElement.name !== "Prompt: " || responseElement.name !== "Response: ")
+                return
             currentChat.regenerateResponse()
-            if (chatModel.count) {
-                if (listElement.name === "Response: ") {
-                    chatModel.updateCurrentResponse(index, true);
-                    chatModel.updateStopped(index, false);
-                    chatModel.updateThumbsUpState(index, false);
-                    chatModel.updateThumbsDownState(index, false);
-                    chatModel.updateNewResponse(index, "");
-                    currentChat.prompt(listElement.prompt)
-                }
-            }
+            chatModel.updateCurrentResponse(responseIndex, true)
+            chatModel.updateStopped(responseIndex, false)
+            chatModel.updateThumbsUpState(responseIndex, false)
+            chatModel.updateThumbsDownState(responseIndex, false)
+            chatModel.updateNewResponse(responseIndex, "")
+            currentChat.prompt(promptElement.value)
         }
         ToolTip.visible: regenerateButton.hovered
         ToolTip.text: qsTr("Redo last chat response")
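With the per-item prompt field removed, the regenerate handler above can no longer read the prompt back off the response item (the old listElement.prompt). It now locates the preceding prompt item at chatModel.count - 2 and resends its value, bailing out early unless the last two items really are a "Prompt: "/"Response: " pair.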
@@ -240,7 +240,7 @@ void Chat::newPromptResponsePair(const QString &prompt)
     m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
     // the prompt is passed as the prompt item's value and the response item's prompt
     m_chatModel->appendPrompt("Prompt: ", prompt);
-    m_chatModel->appendResponse("Response: ", prompt);
+    m_chatModel->appendResponse("Response: ");
     emit resetResponseRequested();
 }
 
@@ -251,7 +251,7 @@ void Chat::serverNewPromptResponsePair(const QString &prompt)
     m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
     // the prompt is passed as the prompt item's value and the response item's prompt
     m_chatModel->appendPrompt("Prompt: ", prompt);
-    m_chatModel->appendResponse("Response: ", prompt);
+    m_chatModel->appendResponse("Response: ");
 }
 
 bool Chat::restoringFromText() const
@@ -19,7 +19,7 @@
 #include <algorithm>
 
 #define CHAT_FORMAT_MAGIC 0xF5D553CC
-#define CHAT_FORMAT_VERSION 9
+#define CHAT_FORMAT_VERSION 10
 
 class MyChatListModel: public ChatListModel { };
 Q_GLOBAL_STATIC(MyChatListModel, chatListModelInstance)
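Bumping CHAT_FORMAT_VERSION to 10 is what lets the deserialization code further down tell old chat files, which still carry a serialized per-item prompt string, apart from new ones that omit it.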
@@ -22,7 +22,6 @@ struct ChatItem
     Q_PROPERTY(int id MEMBER id)
     Q_PROPERTY(QString name MEMBER name)
     Q_PROPERTY(QString value MEMBER value)
-    Q_PROPERTY(QString prompt MEMBER prompt)
     Q_PROPERTY(QString newResponse MEMBER newResponse)
     Q_PROPERTY(bool currentResponse MEMBER currentResponse)
     Q_PROPERTY(bool stopped MEMBER stopped)
@@ -36,7 +35,6 @@ public:
     int id = 0;
     QString name;
     QString value;
-    QString prompt;
     QString newResponse;
     QList<ResultInfo> sources;
     QList<ResultInfo> consolidatedSources;
@@ -59,7 +57,6 @@ public:
         IdRole = Qt::UserRole + 1,
         NameRole,
         ValueRole,
-        PromptRole,
         NewResponseRole,
         CurrentResponseRole,
         StoppedRole,
@@ -88,8 +85,6 @@ public:
             return item.name;
         case ValueRole:
             return item.value;
-        case PromptRole:
-            return item.prompt;
         case NewResponseRole:
             return item.newResponse;
         case CurrentResponseRole:
@@ -115,7 +110,6 @@ public:
         roles[IdRole] = "id";
         roles[NameRole] = "name";
         roles[ValueRole] = "value";
-        roles[PromptRole] = "prompt";
         roles[NewResponseRole] = "newResponse";
         roles[CurrentResponseRole] = "currentResponse";
         roles[StoppedRole] = "stopped";
@@ -137,12 +131,11 @@ public:
         emit countChanged();
     }
 
-    void appendResponse(const QString &name, const QString &prompt)
+    void appendResponse(const QString &name)
     {
         ChatItem item;
         item.id = m_chatItems.count(); // This is only relevant for responses
         item.name = name;
-        item.prompt = prompt;
         item.currentResponse = true;
         beginInsertRows(QModelIndex(), m_chatItems.size(), m_chatItems.size());
         m_chatItems.append(item);
@@ -266,7 +259,6 @@ public:
             stream << c.id;
             stream << c.name;
             stream << c.value;
-            stream << c.prompt;
             stream << c.newResponse;
             stream << c.currentResponse;
             stream << c.stopped;
@@ -336,7 +328,11 @@ public:
             stream >> c.id;
             stream >> c.name;
             stream >> c.value;
-            stream >> c.prompt;
+            if (version < 10) {
+                // This is deprecated and no longer used
+                QString prompt;
+                stream >> prompt;
+            }
             stream >> c.newResponse;
             stream >> c.currentResponse;
             stream >> c.stopped;
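For readers unfamiliar with the pattern above, here is a minimal, self-contained sketch of the same version-gated read. It is not the project's actual code: ItemV10 and readItem are hypothetical stand-ins. The idea is that a chat file written before format version 10 still contains the deprecated per-item prompt string, so the reader must consume and discard it to keep the remaining fields aligned in the QDataStream.

#include <QDataStream>
#include <QString>

// Hypothetical stand-in for ChatItem after the 'prompt' field was removed.
struct ItemV10 {
    QString name;
    QString value;
    QString newResponse;
};

// Read one item from a stream whose format version was read from the file header.
static ItemV10 readItem(QDataStream &stream, int version)
{
    ItemV10 c;
    stream >> c.name;
    stream >> c.value;
    if (version < 10) {
        // Old files serialized a per-item prompt; read it into a throwaway
        // variable so the fields that follow stay aligned.
        QString prompt;
        stream >> prompt;
    }
    stream >> c.newResponse;
    return c;
}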