From a576220b182c6af5df0401f1ac4151bba249b6c7 Mon Sep 17 00:00:00 2001
From: AT
Date: Fri, 16 Jun 2023 08:09:33 -0700
Subject: [PATCH] Support loading files if 'ggml' is found anywhere in the
 name not just at (#1001)

the beginning and add deprecated flag to models.json so older versions will
show a model, but later versions don't. This will allow us to transition away
from models < ggmlv2 and still allow older installs of gpt4all to work.
---
 gpt4all-chat/chat.cpp     | 23 +++++++++++++++++++----
 gpt4all-chat/chatllm.cpp  | 30 ++++++++++++++++++++----------
 gpt4all-chat/download.cpp | 17 +++++++++++++----
 gpt4all-chat/download.h   |  2 ++
 gpt4all-chat/server.cpp   |  8 ++++----
 5 files changed, 58 insertions(+), 22 deletions(-)

diff --git a/gpt4all-chat/chat.cpp b/gpt4all-chat/chat.cpp
index f33a77fd..f3d55811 100644
--- a/gpt4all-chat/chat.cpp
+++ b/gpt4all-chat/chat.cpp
@@ -399,8 +399,16 @@ QList<QString> Chat::modelList() const
 
     {
         QDir dir(exePath);
-        dir.setNameFilters(QStringList() << "ggml-*.bin");
-        QStringList fileNames = dir.entryList();
+        QStringList allFiles = dir.entryList(QDir::Files);
+
+        // All files that end with .bin and have 'ggml' somewhere in the name
+        QStringList fileNames;
+        for(const QString& filename : allFiles) {
+            if (filename.endsWith(".bin") && filename.contains("ggml")) {
+                fileNames.append(filename);
+            }
+        }
+
         for (const QString& f : fileNames) {
             QString filePath = exePath + f;
             QFileInfo info(filePath);
@@ -416,8 +424,15 @@ QList<QString> Chat::modelList() const
 
     if (localPath != exePath) {
         QDir dir(localPath);
-        dir.setNameFilters(QStringList() << "ggml-*.bin" << "chatgpt-*.txt");
-        QStringList fileNames = dir.entryList();
+        QStringList allFiles = dir.entryList(QDir::Files);
+        QStringList fileNames;
+        for(const QString& filename : allFiles) {
+            if ((filename.endsWith(".bin") && filename.contains("ggml"))
+                || (filename.endsWith(".txt") && filename.startsWith("chatgpt-"))) {
+                fileNames.append(filename);
+            }
+        }
+
         for (const QString &f : fileNames) {
             QString filePath = localPath + f;
             QFileInfo info(filePath);
diff --git a/gpt4all-chat/chatllm.cpp b/gpt4all-chat/chatllm.cpp
index 161c3a9d..6b86663d 100644
--- a/gpt4all-chat/chatllm.cpp
+++ b/gpt4all-chat/chatllm.cpp
@@ -23,16 +23,24 @@
 
 static QString modelFilePath(const QString &modelName, bool isChatGPT)
 {
-    QString modelFilename = isChatGPT ? modelName + ".txt" : "/ggml-" + modelName + ".bin";
-    QString appPath = QCoreApplication::applicationDirPath() + modelFilename;
-    QFileInfo infoAppPath(appPath);
-    if (infoAppPath.exists())
-        return appPath;
+    QVector<QString> possibleFilePaths;
+    if (isChatGPT)
+        possibleFilePaths << "/" + modelName + ".txt";
+    else {
+        possibleFilePaths << "/ggml-" + modelName + ".bin";
+        possibleFilePaths << "/" + modelName + ".bin";
+    }
+    for (const QString &modelFilename : possibleFilePaths) {
+        QString appPath = QCoreApplication::applicationDirPath() + modelFilename;
+        QFileInfo infoAppPath(appPath);
+        if (infoAppPath.exists())
+            return appPath;
 
-    QString downloadPath = Download::globalInstance()->downloadLocalModelsPath() + modelFilename;
-    QFileInfo infoLocalPath(downloadPath);
-    if (infoLocalPath.exists())
-        return downloadPath;
+        QString downloadPath = Download::globalInstance()->downloadLocalModelsPath() + modelFilename;
+        QFileInfo infoLocalPath(downloadPath);
+        if (infoLocalPath.exists())
+            return downloadPath;
+    }
 
     return QString();
 }
@@ -262,7 +270,9 @@ bool ChatLLM::loadModel(const QString &modelName)
 
     if (m_modelInfo.model) {
         QString basename = fileInfo.completeBaseName();
-        setModelName(m_isChatGPT ? basename : basename.remove(0, 5)); // remove the ggml- prefix
+        if (basename.startsWith("ggml-")) // remove the ggml- prefix
+            basename.remove(0, 5);
+        setModelName(basename);
     }
     return m_modelInfo.model;
diff --git a/gpt4all-chat/download.cpp b/gpt4all-chat/download.cpp
index 3661cf27..6635b597 100644
--- a/gpt4all-chat/download.cpp
+++ b/gpt4all-chat/download.cpp
@@ -373,6 +373,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         QString modelFilename = obj["filename"].toString();
         QString modelFilesize = obj["filesize"].toString();
         QString requiresVersion = obj["requires"].toString();
+        QString deprecatedVersion = obj["deprecated"].toString();
         QString url = obj["url"].toString();
         QByteArray modelMd5sum = obj["md5sum"].toString().toLatin1().constData();
         bool isDefault = obj.contains("isDefault") && obj["isDefault"] == QString("true");
@@ -381,12 +382,19 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         bool bestMPT = obj.contains("bestMPT") && obj["bestMPT"] == QString("true");
         QString description = obj["description"].toString();
 
+        // If the currentVersion version is strictly less than required version, then continue
         if (!requiresVersion.isEmpty()
            && requiresVersion != currentVersion
            && compareVersions(requiresVersion, currentVersion)) {
            continue;
        }
 
+        // If the current version is strictly greater than the deprecated version, then continue
+        if (!deprecatedVersion.isEmpty()
+            && compareVersions(currentVersion, deprecatedVersion)) {
+            continue;
+        }
+
         if (isDefault)
             defaultModel = modelFilename;
         quint64 sz = modelFilesize.toULongLong();
@@ -413,6 +421,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         modelInfo.bestMPT = bestMPT;
         modelInfo.description = description;
         modelInfo.requiresVersion = requiresVersion;
+        modelInfo.deprecatedVersion = deprecatedVersion;
         modelInfo.url = url;
         m_modelMap.insert(modelInfo.filename, modelInfo);
     }
@@ -447,10 +456,10 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
     }
 
     // remove ggml- prefix and .bin suffix
-    Q_ASSERT(defaultModel.startsWith("ggml-"));
-    defaultModel = defaultModel.remove(0, 5);
-    Q_ASSERT(defaultModel.endsWith(".bin"));
-    defaultModel.chop(4);
+    if (defaultModel.startsWith("ggml-"))
+        defaultModel = defaultModel.remove(0, 5);
+    if (defaultModel.endsWith(".bin"))
+        defaultModel.chop(4);
 
     QSettings settings;
     settings.sync();
diff --git a/gpt4all-chat/download.h b/gpt4all-chat/download.h
index a34755f6..abeec11e 100644
--- a/gpt4all-chat/download.h
+++ b/gpt4all-chat/download.h
@@ -23,6 +23,7 @@ struct ModelInfo {
     Q_PROPERTY(bool isChatGPT MEMBER isChatGPT)
     Q_PROPERTY(QString description MEMBER description)
     Q_PROPERTY(QString requiresVersion MEMBER requiresVersion)
+    Q_PROPERTY(QString deprecatedVersion MEMBER deprecatedVersion)
     Q_PROPERTY(QString url MEMBER url)
 
 public:
@@ -38,6 +39,7 @@ public:
     bool isChatGPT = false;
     QString description;
     QString requiresVersion;
+    QString deprecatedVersion;
     QString url;
 };
 Q_DECLARE_METATYPE(ModelInfo)
diff --git a/gpt4all-chat/server.cpp b/gpt4all-chat/server.cpp
index 2a65546c..5094b38c 100644
--- a/gpt4all-chat/server.cpp
+++ b/gpt4all-chat/server.cpp
@@ -13,10 +13,10 @@
 
 static inline QString modelToName(const ModelInfo &info)
 {
     QString modelName = info.filename;
-    Q_ASSERT(modelName.startsWith("ggml-"));
-    modelName = modelName.remove(0, 5);
-    Q_ASSERT(modelName.endsWith(".bin"));
-    modelName.chop(4);
+    if (modelName.startsWith("ggml-"))
+        modelName = modelName.remove(0, 5);
+    if (modelName.endsWith(".bin"))
+        modelName.chop(4);
     return modelName;
 }
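
For reference, the new "deprecated" field is read together with the existing "requires" field: a models.json entry stays listed only while the running app version is at least "requires" and not newer than "deprecated", which is what lets pre-ggmlv2 models be hidden from later releases without breaking older installs. Below is a minimal, self-contained sketch of that gating, assuming compareVersions(a, b) answers "is a a strictly newer version than b"; the newerThan and shouldListModel helpers and the version numbers are illustrative stand-ins, not the actual code in download.cpp.

// Sketch only: approximates the requires/deprecated gating in
// Download::parseModelsJsonFile under the assumptions stated above.
#include <QString>
#include <QStringList>
#include <QtDebug>

// Assumed semantics: true if version 'a' is strictly newer than version 'b',
// comparing dotted numeric components such as "2.4.1".
static bool newerThan(const QString &a, const QString &b)
{
    const QStringList as = a.split('.');
    const QStringList bs = b.split('.');
    const int n = qMax(as.size(), bs.size());
    for (int i = 0; i < n; ++i) {
        const int ai = i < as.size() ? as.at(i).toInt() : 0;
        const int bi = i < bs.size() ? bs.at(i).toInt() : 0;
        if (ai != bi)
            return ai > bi;
    }
    return false;
}

// A model entry is shown only when the running version is at least
// requiresVersion and not newer than deprecatedVersion.
static bool shouldListModel(const QString &currentVersion,
                            const QString &requiresVersion,
                            const QString &deprecatedVersion)
{
    // Skip if the current version is strictly less than the required version
    if (!requiresVersion.isEmpty() && requiresVersion != currentVersion
        && newerThan(requiresVersion, currentVersion))
        return false;
    // Skip if the current version is strictly greater than the deprecated version
    if (!deprecatedVersion.isEmpty() && newerThan(currentVersion, deprecatedVersion))
        return false;
    return true;
}

int main()
{
    // Hypothetical entry marked deprecated at "2.4.1": still listed on 2.4.0,
    // hidden once the app is 2.4.2 or newer.
    qDebug() << shouldListModel("2.4.0", "2.2.0", "2.4.1"); // true
    qDebug() << shouldListModel("2.4.2", "2.2.0", "2.4.1"); // false
    return 0;
}

With this reading, marking a model as deprecated in one release removes it from the download list of every later release, while the relaxed filename matching above keeps already-downloaded files loadable.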