Modellist temp

Author: Adam Treat
Date: 2023-06-22 15:44:49 -04:00
Parent: c1794597a7
Commit: 7f01b153b3
25 changed files with 1784 additions and 1108 deletions

@@ -5,7 +5,9 @@ import QtQuick.Controls.Basic
 import QtQuick.Layouts
 import Qt5Compat.GraphicalEffects
 import llm
+import chatlistmodel
 import download
+import modellist
 import network
 import gpt4all
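
Note: the two new imports back the switch from `LLM.chatListModel` to the `ChatListModel` and `ModelList` singletons used throughout this diff. The URIs are registered by the application itself (presumably on the C++ side, which this commit's QML hunks do not show). Purely as an illustrative analogue, a QML-side singleton with the same call shape would look like the sketch below; the file name, qmldir entry, and members are assumptions, not this repo's code.

    // ModelList.qml -- illustrative sketch only, not from this commit.
    // Made a singleton by a qmldir entry such as:
    //     singleton ModelList 1.0 ModelList.qml
    pragma Singleton
    import QtQuick

    QtObject {
        property int count: 0            // number of installed models
        function modelInfo(filename) {   // lookup by the "filename" role
            return { filename: filename, name: "", isChatGPT: false }
        }
    }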
@@ -22,7 +24,7 @@ Window {
         id: theme
     }

-    property var currentChat: LLM.chatListModel.currentChat
+    property var currentChat: ChatListModel.currentChat
     property var chatModel: currentChat.chatModel
     property bool hasSaved: false
@@ -31,12 +33,12 @@ Window {
             return;
         savingPopup.open();
-        LLM.chatListModel.saveChats();
+        ChatListModel.saveChats();
         close.accepted = false
     }

     Connections {
-        target: LLM.chatListModel
+        target: ChatListModel
         function onSaveChatsFinished() {
             window.hasSaved = true;
             savingPopup.close();
@@ -96,7 +98,7 @@ Window {
         }

         // check for any current models and if not, open download dialog
-        if (currentChat.modelList.length === 0 && !firstStartDialog.opened) {
+        if (ModelList.count === 0 && !firstStartDialog.opened) {
             downloadNewModels.open();
             return;
         }
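
The hunks below consume a small surface on the new ModelList singleton. Collected in one sketch for reference, with every member inferred from this diff alone rather than from the repo:

    import QtQuick
    import modellist   // module added in this commit's import block

    Item {
        Component.onCompleted: {
            console.log(ModelList.count)            // drives the "no models" check above
            console.log(ModelList.installedModels)  // item model with "name"/"filename" roles
            console.log(ModelList.modelInfo(""))    // lookup used by the combo box below
        }
    }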
@@ -144,7 +146,17 @@ Window {
         id: modelLoadingErrorPopup
         anchors.centerIn: parent
         shouldTimeOut: false
-        text: currentChat.modelLoadingError
+        text: qsTr("<h3>Encountered an error loading model:</h3><br>")
+            + "<i>\"" + currentChat.modelLoadingError + "\"</i>"
+            + qsTr("<br><br>Model loading failures can happen for a variety of reasons, but the most common "
+            + "causes include a bad file format, an incomplete or corrupted download, the wrong file "
+            + "type or an incompatible model type. Here are some suggestions for resolving the problem:"
+            + "<br><ul>"
+            + "<li>Ensure the model file has a compatible ggml format and type"
+            + "<li>Check the model file is complete in the download folder"
+            + "<li>You can find the download folder in the settings dialog"
+            + "<li>If you've sideloaded the model ensure the file is not corrupt by checking md5sum"
+            + "<li>Check out our <a href=\"https://discord.gg/4M2QFmTt2k\">discord channel</a> for help")
     }

     Rectangle {
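
The new error text mixes qsTr() with styled-text markup and an anchor. The popup's internals are not shown in this hunk, so how it renders the string is an assumption; in Qt Quick such a string is typically displayed by a Text item configured like this self-contained sketch:

    import QtQuick
    import QtQuick.Controls

    ApplicationWindow {
        visible: true
        width: 480; height: 240

        Text {
            anchors.fill: parent
            wrapMode: Text.WordWrap
            textFormat: Text.StyledText   // renders <h3>, <i>, <ul>/<li>, <a href>
            text: "<h3>Encountered an error loading model:</h3>"
                  + "<br>See our <a href=\"https://discord.gg/4M2QFmTt2k\">discord channel</a> for help"
            onLinkActivated: (link) => Qt.openUrlExternally(link)   // make the anchor clickable
        }
    }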
@@ -180,12 +192,22 @@ Window {
             anchors.horizontalCenter: parent.horizontalCenter
             anchors.horizontalCenterOffset: window.width >= 950 ? 0 : Math.max(-((950 - window.width) / 2), -99.5)
             enabled: !currentChat.isServer
-            model: currentChat.modelList
+            model: ModelList.installedModels
+            valueRole: "filename"
+            textRole: "name"
+            Connections {
+                target: currentChat
+                function onModelInfoChanged() {
+                    comboBox.currentIndex = comboBox.indexOfValue(currentChat.modelInfo.filename)
+                }
+            }
             contentItem: Text {
                 anchors.horizontalCenter: parent.horizontalCenter
                 leftPadding: 10
                 rightPadding: 20
-                text: currentChat.modelLoadingError !== "" ? "Model loading error..." : comboBox.displayText
+                text: currentChat.modelLoadingError !== "" ? qsTr("Model loading error...")
+                    : (comboBox.textAt(comboBox.currentIndex) !== "" ? comboBox.textAt(comboBox.currentIndex)
+                    : comboBox.valueAt(comboBox.currentIndex))
                 font: comboBox.font
                 color: theme.textColor
                 verticalAlignment: Text.AlignVCenter
@@ -195,7 +217,7 @@ Window {
             delegate: ItemDelegate {
                 width: comboBox.width
                 contentItem: Text {
-                    text: modelData
+                    text: name !== "" ? name : filename
                     color: theme.textColor
                     font: comboBox.font
                     elide: Text.ElideRight
@@ -212,7 +234,7 @@ Window {
             onActivated: {
                 currentChat.stopGenerating()
                 currentChat.reset();
-                currentChat.modelName = comboBox.currentText
+                currentChat.modelInfo = ModelList.modelInfo(comboBox.valueAt(comboBox.currentIndex))
             }
         }
     }
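
The combo box now keys selection on a stable filename value while displaying a human-readable name, and re-syncs by value when the chat's model info changes. A self-contained sketch of that standard valueRole/textRole pattern (the model data here is made up):

    import QtQuick
    import QtQuick.Controls

    ApplicationWindow {
        visible: true
        width: 320; height: 120

        ComboBox {
            id: box
            anchors.centerIn: parent
            textRole: "name"        // what the user sees
            valueRole: "filename"   // what indexOfValue()/valueAt() resolve
            model: ListModel {
                ListElement { name: "Example 7B";  filename: "example-7b.bin" }
                ListElement { name: "Example 13B"; filename: "example-13b.bin" }
            }
            // Resolve the stable value, not the display text, as the diff now does:
            onActivated: console.log("selected:", box.valueAt(box.currentIndex))
            // Re-sync selection from external state by value, like onModelInfoChanged:
            Component.onCompleted: currentIndex = indexOfValue("example-13b.bin")
        }
    }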
@@ -552,7 +574,7 @@ Window {
         id: downloadNewModels
         anchors.centerIn: parent
         width: Math.min(1024, window.width - (window.width * .2))
-        height: Math.min(600, window.height - (window.height * .2))
+        height: window.height - (window.height * .2)
         Item {
             Accessible.role: Accessible.Dialog
             Accessible.name: qsTr("Download new models dialog")
@@ -817,7 +839,7 @@ Window {
             }

             Image {
-                visible: currentChat.isServer || currentChat.modelName.startsWith("chatgpt-")
+                visible: currentChat.isServer || currentChat.modelInfo.isChatGPT
                 anchors.fill: parent
                 sourceSize.width: 1024
                 sourceSize.height: 1024