Better error handling when the model fails to load.

Adam Treat 2023-06-04 14:55:05 -04:00
parent b5971b0d41
commit a84bcccb3a
6 changed files with 69 additions and 12 deletions
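In outline, the change replaces qWarning-only handling with an error string that travels from the backend to the UI: ChatLLM::loadModel() emits modelLoadingError(QString) when the model file is missing or has an invalid format, Chat stores the message in a new modelLoadingError property (cleared before every load request) and notifies via modelLoadingErrorChanged(), and main.qml opens modelLoadingErrorPopup whenever that property is non-empty. A minimal sketch of the property/slot pattern, condensed from the chat.h/chat.cpp hunks below (only the error-related members are shown; the rest of the class is omitted):

#include <QObject>
#include <QString>
#include <QDebug>

class Chat : public QObject
{
    Q_OBJECT
    // QML binds to this; the error popup opens whenever it becomes non-empty.
    Q_PROPERTY(QString modelLoadingError READ modelLoadingError NOTIFY modelLoadingErrorChanged)

public:
    explicit Chat(QObject *parent = nullptr) : QObject(parent) {}

    QString modelLoadingError() const { return m_modelLoadingError; }

    void loadModel(const QString &modelName)
    {
        // Clear any stale error before asking the worker to load a new model.
        m_modelLoadingError = QString();
        emit modelLoadingErrorChanged();
        emit loadModelRequested(modelName);
    }

public Q_SLOTS:
    // Connected (queued) to ChatLLM::modelLoadingError in Chat::connectLLM().
    void handleModelLoadingError(const QString &error)
    {
        qWarning() << "ERROR:" << qPrintable(error);
        m_modelLoadingError = error;
        emit modelLoadingErrorChanged();
    }

Q_SIGNALS:
    void modelLoadingErrorChanged();
    void loadModelRequested(const QString &modelName);

private:
    QString m_modelLoadingError;
};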

chat.cpp

@@ -3,6 +3,7 @@
 #include "localdocs.h"
 #include "network.h"
 #include "download.h"
+#include "server.h"

 Chat::Chat(QObject *parent)
     : QObject(parent)
@@ -53,7 +54,7 @@ void Chat::connectLLM()
     connect(m_llmodel, &ChatLLM::promptProcessing, this, &Chat::promptProcessing, Qt::QueuedConnection);
     connect(m_llmodel, &ChatLLM::responseStopped, this, &Chat::responseStopped, Qt::QueuedConnection);
     connect(m_llmodel, &ChatLLM::modelNameChanged, this, &Chat::handleModelNameChanged, Qt::QueuedConnection);
-    connect(m_llmodel, &ChatLLM::modelLoadingError, this, &Chat::modelLoadingError, Qt::QueuedConnection);
+    connect(m_llmodel, &ChatLLM::modelLoadingError, this, &Chat::handleModelLoadingError, Qt::QueuedConnection);
     connect(m_llmodel, &ChatLLM::recalcChanged, this, &Chat::handleRecalculating, Qt::QueuedConnection);
     connect(m_llmodel, &ChatLLM::generatedNameChanged, this, &Chat::generatedNameChanged, Qt::QueuedConnection);
@@ -243,6 +244,8 @@ void Chat::setModelName(const QString &modelName)
 {
     // doesn't block but will unload old model and load new one which the gui can see through changes
     // to the isModelLoaded property
+    m_modelLoadingError = QString();
+    emit modelLoadingErrorChanged();
     emit modelNameChangeRequested(modelName);
 }
@@ -270,11 +273,15 @@ bool Chat::isRecalc() const

 void Chat::loadDefaultModel()
 {
+    m_modelLoadingError = QString();
+    emit modelLoadingErrorChanged();
     emit loadDefaultModelRequested();
 }

 void Chat::loadModel(const QString &modelName)
 {
+    m_modelLoadingError = QString();
+    emit modelLoadingErrorChanged();
     emit loadModelRequested(modelName);
 }
@@ -322,6 +329,13 @@ void Chat::handleModelNameChanged()
     emit modelNameChanged();
 }

+void Chat::handleModelLoadingError(const QString &error)
+{
+    qWarning() << "ERROR:" << qPrintable(error) << "id" << id();
+    m_modelLoadingError = error;
+    emit modelLoadingErrorChanged();
+}
+
 bool Chat::serialize(QDataStream &stream, int version) const
 {
     stream << m_creationDate;

chat.h

@@ -8,7 +8,6 @@
 #include "chatllm.h"
 #include "chatmodel.h"
 #include "database.h"
-#include "server.h"

 class Chat : public QObject
 {
@@ -25,6 +24,7 @@ class Chat : public QObject
     Q_PROPERTY(bool isServer READ isServer NOTIFY isServerChanged)
     Q_PROPERTY(QString responseState READ responseState NOTIFY responseStateChanged)
     Q_PROPERTY(QList<QString> collectionList READ collectionList NOTIFY collectionListChanged)
+    Q_PROPERTY(QString modelLoadingError READ modelLoadingError NOTIFY modelLoadingErrorChanged)
     QML_ELEMENT
     QML_UNCREATABLE("Only creatable from c++!")
@@ -89,6 +89,8 @@ public:
     Q_INVOKABLE void removeCollection(const QString &collection);
     void resetResponseState();
+    QString modelLoadingError() const { return m_modelLoadingError; }

 public Q_SLOTS:
     void serverNewPromptResponsePair(const QString &prompt);
@@ -113,7 +115,7 @@ Q_SIGNALS:
     void loadModelRequested(const QString &modelName);
     void generateNameRequested();
     void modelListChanged();
-    void modelLoadingError(const QString &error);
+    void modelLoadingErrorChanged();
     void isServerChanged();
     void collectionListChanged();
@@ -125,12 +127,14 @@ private Q_SLOTS:
     void generatedNameChanged();
     void handleRecalculating();
     void handleModelNameChanged();
+    void handleModelLoadingError(const QString &error);

 private:
     QString m_id;
     QString m_name;
     QString m_userName;
     QString m_savedModelName;
+    QString m_modelLoadingError;
     QList<QString> m_collections;
     ChatModel *m_chatModel;
     bool m_responseInProgress;

chatlistmodel.cpp

@@ -233,7 +233,7 @@ void ChatListModel::restoreChat(Chat *chat)
 {
     chat->setParent(this);
     connect(chat, &Chat::nameChanged, this, &ChatListModel::nameChanged);
-    connect(chat, &Chat::modelLoadingError, this, &ChatListModel::handleModelLoadingError);
+    connect(chat, &Chat::modelLoadingErrorChanged, this, &ChatListModel::handleModelLoadingError);

     if (m_dummyChat) {
         beginResetModel();

chatlistmodel.h

@@ -227,10 +227,9 @@ private Q_SLOTS:
         emit dataChanged(index, index, {NameRole});
     }

-    void handleModelLoadingError(const QString &error)
+    void handleModelLoadingError()
     {
         Chat *chat = qobject_cast<Chat *>(sender());
-        qWarning() << "ERROR:" << qPrintable(error) << "id" << chat->id();
         removeChat(chat);
     }

chatllm.cpp

@@ -228,6 +228,11 @@ bool ChatLLM::loadModel(const QString &modelName)
             case 'M': m_modelType = LLModelType::MPT_; break;
             default: delete std::exchange(m_modelInfo.model, nullptr);
             }
+        } else {
+            if (!m_isServer)
+                LLModelStore::globalInstance()->releaseModel(m_modelInfo); // release back into the store
+            m_modelInfo = LLModelInfo();
+            emit modelLoadingError(QString("Could not load model due to invalid format for %1").arg(modelName));
         }
     }
 #if defined(DEBUG_MODEL_LOADING)
@@ -249,8 +254,8 @@ bool ChatLLM::loadModel(const QString &modelName)
     } else {
         if (!m_isServer)
             LLModelStore::globalInstance()->releaseModel(m_modelInfo); // release back into the store
-        const QString error = QString("Could not find model %1").arg(modelName);
-        emit modelLoadingError(error);
+        m_modelInfo = LLModelInfo();
+        emit modelLoadingError(QString("Could not find file for model %1").arg(modelName));
     }

     if (m_modelInfo.model) {
@@ -342,8 +347,7 @@ void ChatLLM::setModelName(const QString &modelName)

 void ChatLLM::modelNameChangeRequested(const QString &modelName)
 {
-    if (!loadModel(modelName))
-        qWarning() << "ERROR: Could not load model" << modelName;
+    loadModel(modelName);
 }

 bool ChatLLM::handlePrompt(int32_t token)

main.qml

@@ -62,6 +62,10 @@ Window {
            if (Network.isActive && !currentChat.responseInProgress)
                Network.sendConversation(currentChat.id, getConversationJson());
        }
+       function onModelLoadingErrorChanged() {
+           if (currentChat.modelLoadingError !== "")
+               modelLoadingErrorPopup.open()
+       }
    }

    function startupDialogs() {
@@ -116,6 +120,13 @@
        Accessible.name: title
    }

+   PopupDialog {
+       id: modelLoadingErrorPopup
+       anchors.centerIn: parent
+       shouldTimeOut: false
+       text: currentChat.modelLoadingError
+   }
+
    Rectangle {
        id: header
        anchors.left: parent.left
@@ -126,7 +137,7 @@
        Item {
            anchors.centerIn: parent
            height: childrenRect.height
-           visible: currentChat.isModelLoaded || currentChat.isServer
+           visible: currentChat.isModelLoaded || currentChat.modelLoadingError !== "" || currentChat.isServer

            Label {
                id: modelLabel
@@ -150,6 +161,31 @@
            anchors.horizontalCenterOffset: window.width >= 950 ? 0 : Math.max(-((950 - window.width) / 2), -99.5)
            enabled: !currentChat.isServer
            model: currentChat.modelList
+           contentItem: Text {
+               anchors.horizontalCenter: parent.horizontalCenter
+               leftPadding: 10
+               rightPadding: 20
+               text: currentChat.modelLoadingError !== "" ? "Model loading error..." : comboBox.displayText
+               font: comboBox.font
+               color: theme.textColor
+               verticalAlignment: Text.AlignVCenter
+               horizontalAlignment: Text.AlignHCenter
+               elide: Text.ElideRight
+           }
+           delegate: ItemDelegate {
+               width: comboBox.width
+               contentItem: Text {
+                   text: modelData
+                   color: theme.textColor
+                   font: comboBox.font
+                   elide: Text.ElideRight
+                   verticalAlignment: Text.AlignVCenter
+               }
+               background: Rectangle {
+                   color: highlighted ? theme.backgroundLight : theme.backgroundDark
+               }
+               highlighted: comboBox.highlightedIndex === index
+           }
            Accessible.role: Accessible.ComboBox
            Accessible.name: qsTr("ComboBox for displaying/picking the current model")
            Accessible.description: qsTr("Use this for picking the current model to use; the first item is the current model")
@@ -163,7 +199,7 @@
        Item {
            anchors.centerIn: parent
-           visible: !currentChat.isModelLoaded && !currentChat.isServer
+           visible: !currentChat.isModelLoaded && currentChat.modelLoadingError === "" && !currentChat.isServer
            width: childrenRect.width
            height: childrenRect.height

            Row {