mirror of https://github.com/nomic-ai/gpt4all.git
synced 2025-06-26 07:23:38 +00:00
Support loading files if 'ggml' is found anywhere in the name, not just at the beginning (#1001)
Also add a deprecated flag to models.json so that older versions will still show a model but later versions won't. This lets us transition away from models older than ggmlv2 while still allowing older installs of gpt4all to work.
This commit is contained in:
parent abc081e48d
commit a576220b18
@@ -399,8 +399,16 @@ QList<QString> Chat::modelList() const
     {
         QDir dir(exePath);
-        dir.setNameFilters(QStringList() << "ggml-*.bin");
-        QStringList fileNames = dir.entryList();
+        QStringList allFiles = dir.entryList(QDir::Files);
+
+        // All files that end with .bin and have 'ggml' somewhere in the name
+        QStringList fileNames;
+        for(const QString& filename : allFiles) {
+            if (filename.endsWith(".bin") && filename.contains("ggml")) {
+                fileNames.append(filename);
+            }
+        }
+
         for (const QString& f : fileNames) {
             QString filePath = exePath + f;
             QFileInfo info(filePath);
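Note: the new filter above accepts any .bin file with "ggml" anywhere in its name, whereas the old setNameFilters glob required a literal "ggml-" prefix. A minimal standalone sketch of the same check, using hypothetical file names:

#include <QStringList>
#include <QDebug>

int main()
{
    // Hypothetical directory listing; only the matching rule comes from the patch.
    const QStringList allFiles = {
        "ggml-gpt4all-j-v1.3-groovy.bin",  // matched by the old glob and by the new check
        "wizardLM-7B.ggmlv3.q4_0.bin",     // matched only by the new check ('ggml' mid-name)
        "chatgpt-gpt-3.5-turbo.txt",       // not a .bin, ignored by this block
        "README.md"                        // no 'ggml', ignored
    };

    QStringList fileNames;
    for (const QString &filename : allFiles) {
        if (filename.endsWith(".bin") && filename.contains("ggml"))
            fileNames.append(filename);
    }

    qDebug() << fileNames;
    // ("ggml-gpt4all-j-v1.3-groovy.bin", "wizardLM-7B.ggmlv3.q4_0.bin")
    return 0;
}

Only the first two names end up in fileNames; the chatgpt-*.txt case is handled separately in the download-path block in the next hunk.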
@@ -416,8 +424,15 @@ QList<QString> Chat::modelList() const
     if (localPath != exePath) {
         QDir dir(localPath);
-        dir.setNameFilters(QStringList() << "ggml-*.bin" << "chatgpt-*.txt");
-        QStringList fileNames = dir.entryList();
+        QStringList allFiles = dir.entryList(QDir::Files);
+        QStringList fileNames;
+        for(const QString& filename : allFiles) {
+            if ((filename.endsWith(".bin") && filename.contains("ggml"))
+                || (filename.endsWith(".txt") && filename.startsWith("chatgpt-"))) {
+                fileNames.append(filename);
+            }
+        }
+
         for (const QString &f : fileNames) {
             QString filePath = localPath + f;
             QFileInfo info(filePath);
@@ -23,7 +23,14 @@
 static QString modelFilePath(const QString &modelName, bool isChatGPT)
 {
-    QString modelFilename = isChatGPT ? modelName + ".txt" : "/ggml-" + modelName + ".bin";
+    QVector<QString> possibleFilePaths;
+    if (isChatGPT)
+        possibleFilePaths << "/" + modelName + ".txt";
+    else {
+        possibleFilePaths << "/ggml-" + modelName + ".bin";
+        possibleFilePaths << "/" + modelName + ".bin";
+    }
+    for (const QString &modelFilename : possibleFilePaths) {
     QString appPath = QCoreApplication::applicationDirPath() + modelFilename;
     QFileInfo infoAppPath(appPath);
     if (infoAppPath.exists())
@@ -33,6 +40,7 @@ static QString modelFilePath(const QString &modelName, bool isChatGPT)
     QFileInfo infoLocalPath(downloadPath);
     if (infoLocalPath.exists())
         return downloadPath;
+    }
     return QString();
 }
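To make the reworked lookup concrete: for a non-ChatGPT model, modelFilePath now probes a short ordered list of candidate file names in the application directory and then in the download directory, instead of assuming the "ggml-" prefix. A sketch of just the candidate list, with a made-up model name:

#include <QString>
#include <QVector>
#include <QDebug>

int main()
{
    const QString modelName = "vicuna-13b-q4_0"; // hypothetical model name
    const bool isChatGPT = false;

    QVector<QString> possibleFilePaths;
    if (isChatGPT)
        possibleFilePaths << "/" + modelName + ".txt";
    else {
        possibleFilePaths << "/ggml-" + modelName + ".bin"; // legacy "ggml-" naming, tried first
        possibleFilePaths << "/" + modelName + ".bin";      // bare name, the new fallback
    }

    qDebug() << possibleFilePaths;
    // "/ggml-vicuna-13b-q4_0.bin", "/vicuna-13b-q4_0.bin"
    return 0;
}

The legacy "ggml-" name is still tried first, so existing installs keep resolving to the same file.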
@@ -262,7 +270,9 @@ bool ChatLLM::loadModel(const QString &modelName)
     if (m_modelInfo.model) {
         QString basename = fileInfo.completeBaseName();
-        setModelName(m_isChatGPT ? basename : basename.remove(0, 5)); // remove the ggml- prefix
+        if (basename.startsWith("ggml-")) // remove the ggml- prefix
+            basename.remove(0, 5);
+        setModelName(basename);
     }
 
     return m_modelInfo.model;
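The conditional prefix stripping above matters because completeBaseName() no longer necessarily starts with "ggml-" now that other file names are accepted. A small sketch with two hypothetical files; only the first has the prefix:

#include <QFileInfo>
#include <QStringList>
#include <QDebug>

int main()
{
    // Hypothetical model files; only the prefix-stripping rule comes from the patch.
    const QStringList paths = { "ggml-gpt4all-j-v1.3-groovy.bin", "wizardLM-7B.ggmlv3.q4_0.bin" };
    for (const QString &path : paths) {
        QFileInfo fileInfo(path);
        QString basename = fileInfo.completeBaseName();
        if (basename.startsWith("ggml-")) // remove the ggml- prefix only when it is really there
            basename.remove(0, 5);
        qDebug() << basename;
    }
    // Prints "gpt4all-j-v1.3-groovy" and "wizardLM-7B.ggmlv3.q4_0";
    // the old unconditional remove(0, 5) would have clipped the second to "dLM-7B.ggmlv3.q4_0".
    return 0;
}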
@@ -373,6 +373,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         QString modelFilename = obj["filename"].toString();
         QString modelFilesize = obj["filesize"].toString();
         QString requiresVersion = obj["requires"].toString();
+        QString deprecatedVersion = obj["deprecated"].toString();
         QString url = obj["url"].toString();
         QByteArray modelMd5sum = obj["md5sum"].toString().toLatin1().constData();
         bool isDefault = obj.contains("isDefault") && obj["isDefault"] == QString("true");
@@ -381,12 +382,19 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         bool bestMPT = obj.contains("bestMPT") && obj["bestMPT"] == QString("true");
         QString description = obj["description"].toString();
 
+        // If the currentVersion version is strictly less than required version, then continue
         if (!requiresVersion.isEmpty()
             && requiresVersion != currentVersion
             && compareVersions(requiresVersion, currentVersion)) {
             continue;
         }
 
+        // If the current version is strictly greater than the deprecated version, then continue
+        if (!deprecatedVersion.isEmpty()
+            && compareVersions(currentVersion, deprecatedVersion)) {
+            continue;
+        }
+
         if (isDefault)
             defaultModel = modelFilename;
         quint64 sz = modelFilesize.toULongLong();
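Together with the existing "requires" check, the new "deprecated" field gives each models.json entry a version window: builds older than "requires" skip the entry, and builds newer than "deprecated" skip it too. The sketch below plays this through against a hypothetical entry; the entry, the version strings, and the compareVersions stand-in (read as "a is strictly greater than b", as the comments suggest) are assumptions for illustration, not taken from the repository:

#include <QJsonDocument>
#include <QJsonObject>
#include <QStringList>
#include <QDebug>

// Stand-in for the application's compareVersions helper, assumed here to mean
// "version a is strictly greater than version b" based on the comments in the patch.
static bool compareVersions(const QString &a, const QString &b)
{
    const QStringList as = a.split('.');
    const QStringList bs = b.split('.');
    for (int i = 0; i < qMin(as.size(), bs.size()); ++i) {
        if (as.at(i).toInt() != bs.at(i).toInt())
            return as.at(i).toInt() > bs.at(i).toInt();
    }
    return as.size() > bs.size();
}

int main()
{
    // Hypothetical models.json entry; only the "requires"/"deprecated" handling follows the patch.
    const QByteArray jsonData = R"({
        "filename": "ggml-example-model.bin",
        "requires": "2.4.0",
        "deprecated": "2.4.9"
    })";
    const QJsonObject obj = QJsonDocument::fromJson(jsonData).object();

    const QString currentVersion = "2.5.0"; // pretend this build is past the window
    const QString requiresVersion = obj["requires"].toString();
    const QString deprecatedVersion = obj["deprecated"].toString();

    const bool tooOld = !requiresVersion.isEmpty()
        && requiresVersion != currentVersion
        && compareVersions(requiresVersion, currentVersion);
    const bool tooNew = !deprecatedVersion.isEmpty()
        && compareVersions(currentVersion, deprecatedVersion);

    qDebug() << "list this model?" << !(tooOld || tooNew); // false for 2.5.0, true for e.g. 2.4.5
    return 0;
}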
@@ -413,6 +421,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         modelInfo.bestMPT = bestMPT;
         modelInfo.description = description;
         modelInfo.requiresVersion = requiresVersion;
+        modelInfo.deprecatedVersion = deprecatedVersion;
         modelInfo.url = url;
         m_modelMap.insert(modelInfo.filename, modelInfo);
     }
@@ -447,9 +456,9 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
     }
 
     // remove ggml- prefix and .bin suffix
-    Q_ASSERT(defaultModel.startsWith("ggml-"));
-    defaultModel = defaultModel.remove(0, 5);
-    Q_ASSERT(defaultModel.endsWith(".bin"));
-    defaultModel.chop(4);
+    if (defaultModel.startsWith("ggml-"))
+        defaultModel = defaultModel.remove(0, 5);
+    if (defaultModel.endsWith(".bin"))
+        defaultModel.chop(4);
 
     QSettings settings;
@@ -23,6 +23,7 @@ struct ModelInfo {
     Q_PROPERTY(bool isChatGPT MEMBER isChatGPT)
     Q_PROPERTY(QString description MEMBER description)
     Q_PROPERTY(QString requiresVersion MEMBER requiresVersion)
+    Q_PROPERTY(QString deprecatedVersion MEMBER deprecatedVersion)
     Q_PROPERTY(QString url MEMBER url)
 
 public:
@@ -38,6 +39,7 @@ public:
     bool isChatGPT = false;
     QString description;
     QString requiresVersion;
+    QString deprecatedVersion;
     QString url;
 };
 Q_DECLARE_METATYPE(ModelInfo)
@@ -13,9 +13,9 @@
 static inline QString modelToName(const ModelInfo &info)
 {
     QString modelName = info.filename;
-    Q_ASSERT(modelName.startsWith("ggml-"));
-    modelName = modelName.remove(0, 5);
-    Q_ASSERT(modelName.endsWith(".bin"));
-    modelName.chop(4);
+    if (modelName.startsWith("ggml-"))
+        modelName = modelName.remove(0, 5);
+    if (modelName.endsWith(".bin"))
+        modelName.chop(4);
     return modelName;
 }