Support loading files if 'ggml' is found anywhere in the name, not just at the beginning, and add a deprecated flag to models.json so older versions will show a model but later versions won't (#1001).

This will allow us to transition away from models < ggmlv2 and still allow older installs of gpt4all to work.
AT 2023-06-16 08:09:33 -07:00 committed by GitHub
parent abc081e48d
commit a576220b18
5 changed files with 58 additions and 22 deletions
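
For reference, the new gate is driven by two optional version fields on each models.json entry. Below is a minimal sketch of an entry that uses both; only the key names ("requires", "deprecated", and friends) come from the parsing code in this commit, while the filename and version values are hypothetical.

    #include <QJsonObject>

    // A hypothetical models.json entry exercising both gates. The keys match the
    // fields read by Download::parseModelsJsonFile below; the values are made up.
    static QJsonObject exampleDeprecatedEntry()
    {
        QJsonObject entry;
        entry["filename"]   = "ggml-example-model-q4_0.bin";      // hypothetical filename
        entry["md5sum"]     = "0123456789abcdef0123456789abcdef"; // placeholder
        entry["requires"]   = "2.4.0"; // app versions older than 2.4.0 skip this model
        entry["deprecated"] = "2.4.8"; // app versions newer than 2.4.8 skip this model
        return entry;
    }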

View File

@@ -399,8 +399,16 @@ QList<QString> Chat::modelList() const
     {
         QDir dir(exePath);
-        dir.setNameFilters(QStringList() << "ggml-*.bin");
-        QStringList fileNames = dir.entryList();
+        QStringList allFiles = dir.entryList(QDir::Files);
+        // All files that end with .bin and have 'ggml' somewhere in the name
+        QStringList fileNames;
+        for(const QString& filename : allFiles) {
+            if (filename.endsWith(".bin") && filename.contains("ggml")) {
+                fileNames.append(filename);
+            }
+        }
         for (const QString& f : fileNames) {
             QString filePath = exePath + f;
             QFileInfo info(filePath);
@@ -416,8 +424,15 @@ QList<QString> Chat::modelList() const
     if (localPath != exePath) {
         QDir dir(localPath);
-        dir.setNameFilters(QStringList() << "ggml-*.bin" << "chatgpt-*.txt");
-        QStringList fileNames = dir.entryList();
+        QStringList allFiles = dir.entryList(QDir::Files);
+        QStringList fileNames;
+        for(const QString& filename : allFiles) {
+            if ((filename.endsWith(".bin") && filename.contains("ggml"))
+                || (filename.endsWith(".txt") && filename.startsWith("chatgpt-"))) {
+                fileNames.append(filename);
+            }
+        }
         for (const QString &f : fileNames) {
             QString filePath = localPath + f;
             QFileInfo info(filePath);
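
The relaxed filter used in both blocks above can be exercised on its own. A minimal sketch of the same predicate follows; the filenames in main() are purely illustrative and not part of this change.

    #include <QString>
    #include <QStringList>
    #include <QDebug>

    // Mirrors the relaxed check from Chat::modelList(): accept any .bin file
    // that mentions "ggml" anywhere, not only names shaped like "ggml-*.bin".
    static bool looksLikeLocalModel(const QString &filename)
    {
        return filename.endsWith(".bin") && filename.contains("ggml");
    }

    int main()
    {
        const QStringList samples = {
            "ggml-gpt4all-j-v1.3-groovy.bin",  // matched before and after this change
            "nous-hermes-13b.ggmlv3.q4_0.bin", // matched only with this change
            "some-model.safetensors"           // still rejected
        };
        for (const QString &name : samples)
            qDebug() << name << looksLikeLocalModel(name);
        return 0;
    }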

View File

@@ -23,16 +23,24 @@
 static QString modelFilePath(const QString &modelName, bool isChatGPT)
 {
-    QString modelFilename = isChatGPT ? modelName + ".txt" : "/ggml-" + modelName + ".bin";
-    QString appPath = QCoreApplication::applicationDirPath() + modelFilename;
-    QFileInfo infoAppPath(appPath);
-    if (infoAppPath.exists())
-        return appPath;
+    QVector<QString> possibleFilePaths;
+    if (isChatGPT)
+        possibleFilePaths << "/" + modelName + ".txt";
+    else {
+        possibleFilePaths << "/ggml-" + modelName + ".bin";
+        possibleFilePaths << "/" + modelName + ".bin";
+    }
+    for (const QString &modelFilename : possibleFilePaths) {
+        QString appPath = QCoreApplication::applicationDirPath() + modelFilename;
+        QFileInfo infoAppPath(appPath);
+        if (infoAppPath.exists())
+            return appPath;
-    QString downloadPath = Download::globalInstance()->downloadLocalModelsPath() + modelFilename;
-    QFileInfo infoLocalPath(downloadPath);
-    if (infoLocalPath.exists())
-        return downloadPath;
+        QString downloadPath = Download::globalInstance()->downloadLocalModelsPath() + modelFilename;
+        QFileInfo infoLocalPath(downloadPath);
+        if (infoLocalPath.exists())
+            return downloadPath;
+    }
     return QString();
 }
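
With the loop above, a non-ChatGPT model name is now resolved against two candidate filenames, each checked first in the application directory and then in the download directory. A rough sketch of the candidates for a hypothetical model name:

    #include <QString>
    #include <QVector>
    #include <QDebug>

    // Illustrative only: lists the relative filenames modelFilePath() would try
    // for a given model name, mirroring the branch added above.
    static QVector<QString> candidateFilenames(const QString &modelName, bool isChatGPT)
    {
        QVector<QString> candidates;
        if (isChatGPT)
            candidates << "/" + modelName + ".txt";
        else {
            candidates << "/ggml-" + modelName + ".bin"; // legacy prefixed name
            candidates << "/" + modelName + ".bin";      // un-prefixed name now accepted
        }
        return candidates;
    }

    int main()
    {
        // Hypothetical model name; each candidate is probed in the application
        // directory first, then in the download directory.
        for (const QString &c : candidateFilenames("mpt-7b-chat", /*isChatGPT=*/false))
            qDebug() << c;
        return 0;
    }
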
@@ -262,7 +270,9 @@ bool ChatLLM::loadModel(const QString &modelName)
     if (m_modelInfo.model) {
         QString basename = fileInfo.completeBaseName();
-        setModelName(m_isChatGPT ? basename : basename.remove(0, 5)); // remove the ggml- prefix
+        if (basename.startsWith("ggml-")) // remove the ggml- prefix
+            basename.remove(0, 5);
+        setModelName(basename);
     }
     return m_modelInfo.model;

View File

@@ -373,6 +373,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         QString modelFilename = obj["filename"].toString();
         QString modelFilesize = obj["filesize"].toString();
         QString requiresVersion = obj["requires"].toString();
+        QString deprecatedVersion = obj["deprecated"].toString();
         QString url = obj["url"].toString();
         QByteArray modelMd5sum = obj["md5sum"].toString().toLatin1().constData();
         bool isDefault = obj.contains("isDefault") && obj["isDefault"] == QString("true");
@@ -381,12 +382,19 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         bool bestMPT = obj.contains("bestMPT") && obj["bestMPT"] == QString("true");
         QString description = obj["description"].toString();
         // If the currentVersion version is strictly less than required version, then continue
         if (!requiresVersion.isEmpty()
             && requiresVersion != currentVersion
             && compareVersions(requiresVersion, currentVersion)) {
             continue;
         }
+        // If the current version is strictly greater than the deprecated version, then continue
+        if (!deprecatedVersion.isEmpty()
+            && compareVersions(currentVersion, deprecatedVersion)) {
+            continue;
+        }
         if (isDefault)
             defaultModel = modelFilename;
         quint64 sz = modelFilesize.toULongLong();
@@ -413,6 +421,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         modelInfo.bestMPT = bestMPT;
         modelInfo.description = description;
         modelInfo.requiresVersion = requiresVersion;
+        modelInfo.deprecatedVersion = deprecatedVersion;
         modelInfo.url = url;
         m_modelMap.insert(modelInfo.filename, modelInfo);
     }
@@ -447,10 +456,10 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
     }
     // remove ggml- prefix and .bin suffix
-    Q_ASSERT(defaultModel.startsWith("ggml-"));
-    defaultModel = defaultModel.remove(0, 5);
-    Q_ASSERT(defaultModel.endsWith(".bin"));
-    defaultModel.chop(4);
+    if (defaultModel.startsWith("ggml-"))
+        defaultModel = defaultModel.remove(0, 5);
+    if (defaultModel.endsWith(".bin"))
+        defaultModel.chop(4);
     QSettings settings;
     settings.sync();
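
compareVersions() itself is not part of this diff; judging from the comments above, it is assumed here to return true when its first argument is a strictly newer version string than its second. Under that assumption, a minimal standalone sketch of the two gates and their combined effect:

    #include <QString>
    #include <QStringList>

    // Assumed semantics (not shown in this diff): true when a is strictly newer
    // than b, comparing dotted numeric components left to right.
    static bool compareVersionsSketch(const QString &a, const QString &b)
    {
        const QStringList as = a.split('.');
        const QStringList bs = b.split('.');
        for (int i = 0; i < qMax(as.size(), bs.size()); ++i) {
            const int av = i < as.size() ? as.at(i).toInt() : 0;
            const int bv = i < bs.size() ? bs.at(i).toInt() : 0;
            if (av != bv)
                return av > bv;
        }
        return false;
    }

    // Mirrors the two gates above: a model is listed only when the running app is
    // new enough for "requires" and not newer than "deprecated".
    static bool modelIsVisible(const QString &currentVersion,
                               const QString &requiresVersion,
                               const QString &deprecatedVersion)
    {
        if (!requiresVersion.isEmpty()
            && requiresVersion != currentVersion
            && compareVersionsSketch(requiresVersion, currentVersion))
            return false; // app too old for this model
        if (!deprecatedVersion.isEmpty()
            && compareVersionsSketch(currentVersion, deprecatedVersion))
            return false; // app newer than the deprecation version
        return true;
    }

    // With hypothetical values requires="2.4.0", deprecated="2.4.8":
    //   currentVersion "2.3.0" -> hidden (too old)
    //   currentVersion "2.4.5" -> shown
    //   currentVersion "2.4.9" -> hidden (past the deprecation version)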

View File

@@ -23,6 +23,7 @@ struct ModelInfo {
     Q_PROPERTY(bool isChatGPT MEMBER isChatGPT)
     Q_PROPERTY(QString description MEMBER description)
     Q_PROPERTY(QString requiresVersion MEMBER requiresVersion)
+    Q_PROPERTY(QString deprecatedVersion MEMBER deprecatedVersion)
     Q_PROPERTY(QString url MEMBER url)
 public:
@@ -38,6 +39,7 @@ public:
     bool isChatGPT = false;
     QString description;
     QString requiresVersion;
+    QString deprecatedVersion;
     QString url;
 };
 Q_DECLARE_METATYPE(ModelInfo)

View File

@@ -13,10 +13,10 @@
 static inline QString modelToName(const ModelInfo &info)
 {
     QString modelName = info.filename;
-    Q_ASSERT(modelName.startsWith("ggml-"));
-    modelName = modelName.remove(0, 5);
-    Q_ASSERT(modelName.endsWith(".bin"));
-    modelName.chop(4);
+    if (modelName.startsWith("ggml-"))
+        modelName = modelName.remove(0, 5);
+    if (modelName.endsWith(".bin"))
+        modelName.chop(4);
     return modelName;
 }
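
Dropping the asserts means a filename without the 'ggml-' prefix now yields a usable display name instead of tripping a debug assertion. A minimal sketch of the same cleanup, with purely hypothetical filenames:

    #include <QString>

    // Mirrors the relaxed name cleanup above; the example filenames are
    // illustrative only.
    static QString displayNameFor(QString filename)
    {
        if (filename.startsWith("ggml-"))
            filename.remove(0, 5);
        if (filename.endsWith(".bin"))
            filename.chop(4);
        return filename;
    }

    // displayNameFor("ggml-gpt4all-j-v1.3-groovy.bin")  == "gpt4all-j-v1.3-groovy"
    // displayNameFor("nous-hermes-13b.ggmlv3.q4_0.bin") == "nous-hermes-13b.ggmlv3.q4_0"
    // Previously the second case would have tripped the Q_ASSERTs in a debug build.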