Make the retrieval/parsing of models.json synchronous on startup. We were jumping through too many hoops to mitigate the async behavior.

Commit: e70899a26c
Parent: 9560336490
Repository: https://github.com/nomic-ai/gpt4all.git
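
As context for the diff below, here is a minimal sketch of the blocking-fetch pattern the commit moves to: issue the request, then spin a local QEventLoop until the reply finishes or a short timeout fires, so models.json can be downloaded and parsed inline during startup. Only the QEventLoop/QTimer::singleShot pattern, the 1500 ms timeout, and the models.json URL come from this commit; the fetchBlocking helper name and the main() harness are illustrative assumptions, not code from the repository.

// Sketch only: the real change lives in ModelList::updateModelList() further down.
#include <QCoreApplication>
#include <QDebug>
#include <QEventLoop>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QTimer>
#include <QUrl>

// Hypothetical helper: block until the reply finishes or timeoutMs elapses.
static QByteArray fetchBlocking(QNetworkAccessManager &nam, const QUrl &url, int timeoutMs)
{
    QNetworkReply *reply = nam.get(QNetworkRequest(url));
    QEventLoop loop;
    QObject::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
    QTimer::singleShot(timeoutMs, &loop, &QEventLoop::quit); // give up after the timeout
    loop.exec();                                             // spin until finished or timeout

    QByteArray data;
    if (reply->isFinished() && reply->error() == QNetworkReply::NoError)
        data = reply->readAll();                             // success: hand the payload back
    else
        qWarning() << "Could not download" << url.toString();
    delete reply;
    return data;
}

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);
    QNetworkAccessManager nam;
    const QByteArray json = fetchBlocking(nam, QUrl("http://gpt4all.io/models/models.json"), 1500);
    qInfo() << "downloaded" << json.size() << "bytes of models.json";
    return 0;
}

The trade-off is that startup can stall for up to the timeout on a slow network, but in exchange the model list is populated deterministically before the UI needs it, which is what lets the commit delete the async plumbing: Download::handleModelsJsonDownloadFinished, the QML startupTimer/Connections workaround, and the modelHasNames bookkeeping in ModelList.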
--- a/gpt4all-chat/download.cpp
+++ b/gpt4all-chat/download.cpp
@@ -13,8 +13,6 @@
#include <QStandardPaths>
#include <QSettings>

-//#define USE_LOCAL_MODELSJSON
-
class MyDownload: public Download { };
Q_GLOBAL_STATIC(MyDownload, downloadInstance)
Download *Download::globalInstance()
@@ -32,21 +30,15 @@ Download::Download()
        &Download::handleHashAndSaveFinished, Qt::QueuedConnection);
    connect(&m_networkManager, &QNetworkAccessManager::sslErrors, this,
        &Download::handleSslErrors);
-    connect(ModelList::globalInstance(), &ModelList::localModelsPathChanged, this, &Download::updateModelList);
-    updateModelList();
    updateReleaseNotes();
    m_startTime = QDateTime::currentDateTime();
}

-bool operator==(const ModelInfo& lhs, const ModelInfo& rhs) {
-    return lhs.filename == rhs.filename && lhs.md5sum == rhs.md5sum;
-}
-
-bool operator==(const ReleaseInfo& lhs, const ReleaseInfo& rhs) {
+static bool operator==(const ReleaseInfo& lhs, const ReleaseInfo& rhs) {
    return lhs.version == rhs.version;
}

-bool compareVersions(const QString &a, const QString &b) {
+static bool compareVersions(const QString &a, const QString &b) {
    QStringList aParts = a.split('.');
    QStringList bParts = b.split('.');

@@ -93,21 +85,6 @@ bool Download::isFirstStart() const
    return first;
}

-void Download::updateModelList()
-{
-#if defined(USE_LOCAL_MODELSJSON)
-    QUrl jsonUrl("file://" + QDir::homePath() + "/dev/large_language_models/gpt4all/gpt4all-chat/metadata/models.json");
-#else
-    QUrl jsonUrl("http://gpt4all.io/models/models.json");
-#endif
-    QNetworkRequest request(jsonUrl);
-    QSslConfiguration conf = request.sslConfiguration();
-    conf.setPeerVerifyMode(QSslSocket::VerifyNone);
-    request.setSslConfiguration(conf);
-    QNetworkReply *jsonReply = m_networkManager.get(request);
-    connect(jsonReply, &QNetworkReply::finished, this, &Download::handleModelsJsonDownloadFinished);
-}
-
void Download::updateReleaseNotes()
{
    QUrl jsonUrl("http://gpt4all.io/meta/release.json");
@@ -234,134 +211,6 @@ void Download::handleSslErrors(QNetworkReply *reply, const QList<QSslError> &errors)
        qWarning() << "ERROR: Received ssl error:" << e.errorString() << "for" << url;
}

-void Download::handleModelsJsonDownloadFinished()
-{
-    QNetworkReply *jsonReply = qobject_cast<QNetworkReply *>(sender());
-    if (!jsonReply)
-        return;
-
-    QByteArray jsonData = jsonReply->readAll();
-    jsonReply->deleteLater();
-    parseModelsJsonFile(jsonData);
-}
-
-void Download::parseModelsJsonFile(const QByteArray &jsonData)
-{
-    QJsonParseError err;
-    QJsonDocument document = QJsonDocument::fromJson(jsonData, &err);
-    if (err.error != QJsonParseError::NoError) {
-        qWarning() << "ERROR: Couldn't parse: " << jsonData << err.errorString();
-        return;
-    }
-
-    QJsonArray jsonArray = document.array();
-    const QString currentVersion = QCoreApplication::applicationVersion();
-
-    for (const QJsonValue &value : jsonArray) {
-        QJsonObject obj = value.toObject();
-
-        QString modelName = obj["name"].toString();
-        QString modelFilename = obj["filename"].toString();
-        QString modelFilesize = obj["filesize"].toString();
-        QString requiresVersion = obj["requires"].toString();
-        QString deprecatedVersion = obj["deprecated"].toString();
-        QString url = obj["url"].toString();
-        QByteArray modelMd5sum = obj["md5sum"].toString().toLatin1().constData();
-        bool isDefault = obj.contains("isDefault") && obj["isDefault"] == QString("true");
-        bool disableGUI = obj.contains("disableGUI") && obj["disableGUI"] == QString("true");
-        QString description = obj["description"].toString();
-        QString order = obj["order"].toString();
-        int ramrequired = obj["ramrequired"].toString().toInt();
-        QString parameters = obj["parameters"].toString();
-        QString quant = obj["quant"].toString();
-        QString type = obj["type"].toString();
-
-        // If the currentVersion version is strictly less than required version, then continue
-        if (!requiresVersion.isEmpty()
-            && requiresVersion != currentVersion
-            && compareVersions(requiresVersion, currentVersion)) {
-            continue;
-        }
-
-        // If the current version is strictly greater than the deprecated version, then continue
-        if (!deprecatedVersion.isEmpty()
-            && compareVersions(currentVersion, deprecatedVersion)) {
-            continue;
-        }
-
-        modelFilesize = ModelList::toFileSize(modelFilesize.toULongLong());
-
-        if (!ModelList::globalInstance()->contains(modelFilename))
-            ModelList::globalInstance()->addModel(modelFilename);
-
-        if (!modelName.isEmpty())
-            ModelList::globalInstance()->updateData(modelFilename, ModelList::NameRole, modelName);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::FilesizeRole, modelFilesize);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::Md5sumRole, modelMd5sum);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::DefaultRole, isDefault);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::DescriptionRole, description);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::RequiresVersionRole, requiresVersion);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::DeprecatedVersionRole, deprecatedVersion);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::UrlRole, url);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::DisableGUIRole, disableGUI);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::OrderRole, order);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::RamrequiredRole, ramrequired);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::ParametersRole, parameters);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::QuantRole, quant);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::TypeRole, type);
-    }
-
-    const QString chatGPTDesc = tr("<ul><li>Requires personal OpenAI API key.</li><li>WARNING: Will send"
-        " your chats to OpenAI!</li><li>Your API key will be stored on disk</li><li>Will only be used"
-        " to communicate with OpenAI</li><li>You can apply for an API key"
-        " <a href=\"https://platform.openai.com/account/api-keys\">here.</a></li>");
-
-    {
-        const QString modelFilename = "chatgpt-gpt-3.5-turbo.txt";
-        if (!ModelList::globalInstance()->contains(modelFilename))
-            ModelList::globalInstance()->addModel(modelFilename);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::NameRole, "ChatGPT-3.5 Turbo");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::FilesizeRole, "minimal");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::ChatGPTRole, true);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::DescriptionRole,
-            tr("<strong>OpenAI's ChatGPT model GPT-3.5 Turbo</strong><br>") + chatGPTDesc);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::RequiresVersionRole, "2.4.2");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::OrderRole, "ca");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::RamrequiredRole, 0);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::ParametersRole, "?");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::QuantRole, "NA");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::TypeRole, "GPT");
-    }
-
-    {
-        const QString modelFilename = "chatgpt-gpt-4.txt";
-        if (!ModelList::globalInstance()->contains(modelFilename))
-            ModelList::globalInstance()->addModel(modelFilename);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::NameRole, "ChatGPT-4");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::FilesizeRole, "minimal");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::ChatGPTRole, true);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::DescriptionRole,
-            tr("<strong>OpenAI's ChatGPT model GPT-4</strong><br>") + chatGPTDesc);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::RequiresVersionRole, "2.4.2");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::OrderRole, "cb");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::RamrequiredRole, 0);
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::ParametersRole, "?");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::QuantRole, "NA");
-        ModelList::globalInstance()->updateData(modelFilename, ModelList::TypeRole, "GPT");
-    }
-
-    if (ModelList::globalInstance()->installedModels()->count()) {
-        const QString firstModel =
-            ModelList::globalInstance()->installedModels()->firstFilename();
-        QSettings settings;
-        settings.sync();
-        settings.setValue("defaultModel", firstModel);
-        settings.sync();
-    }
-
-    ModelList::globalInstance()->updateModelHasNames();
-}
-
void Download::handleReleaseJsonDownloadFinished()
{
    QNetworkReply *jsonReply = qobject_cast<QNetworkReply *>(sender());
--- a/gpt4all-chat/download.h
+++ b/gpt4all-chat/download.h
@@ -57,12 +57,10 @@ public:
    Q_INVOKABLE bool isFirstStart() const;

public Q_SLOTS:
-    void updateModelList();
    void updateReleaseNotes();

private Q_SLOTS:
    void handleSslErrors(QNetworkReply *reply, const QList<QSslError> &errors);
-    void handleModelsJsonDownloadFinished();
    void handleReleaseJsonDownloadFinished();
    void handleErrorOccurred(QNetworkReply::NetworkError code);
    void handleDownloadProgress(qint64 bytesReceived, qint64 bytesTotal);
@@ -78,7 +76,6 @@ Q_SIGNALS:
        QFile *tempFile, QNetworkReply *modelReply);

private:
-    void parseModelsJsonFile(const QByteArray &jsonData);
    void parseReleaseJsonFile(const QByteArray &jsonData);
    QString incompleteDownloadPath(const QString &modelFile);

--- a/gpt4all-chat/main.qml
+++ b/gpt4all-chat/main.qml
@@ -207,28 +207,9 @@ Window {
        textRole: "name"
        property string currentModelName: ""
        function updateCurrentModelName() {
-            // During application startup the model names might not be processed yet, so don't
-            // set the combobox text until this is done OR the timer has timed out
-            if (!ModelList.modelHasNames && startupTimer.running)
-                return
            var info = ModelList.modelInfo(currentChat.modelInfo.filename);
            comboBox.currentModelName = info.name !== "" ? info.name : info.filename;
        }
-        Timer {
-            id: startupTimer
-            interval: 3000 // 3 seconds
-            running: true
-            repeat: false
-            onTriggered: {
-                comboBox.updateCurrentModelName();
-            }
-        }
-        Connections {
-            target: ModelList
-            function onModelHasNamesChanged() {
-                comboBox.updateCurrentModelName();
-            }
-        }
        Connections {
            target: currentChat
            function onModelInfoChanged() {
--- a/gpt4all-chat/modellist.cpp
+++ b/gpt4all-chat/modellist.cpp
@@ -2,6 +2,8 @@

#include <algorithm>

+//#define USE_LOCAL_MODELSJSON
+
InstalledModels::InstalledModels(QObject *parent)
    : QSortFilterProxyModel(parent)
{
@@ -85,7 +87,6 @@ ModelList::ModelList()
    : QAbstractListModel(nullptr)
    , m_installedModels(new InstalledModels(this))
    , m_downloadableModels(new DownloadableModels(this))
-    , m_modelHasNames(false)
{
    m_installedModels->setSourceModel(this);
    m_downloadableModels->setSourceModel(this);
@@ -97,7 +98,9 @@ ModelList::ModelList()
    m_watcher->addPath(exePath);
    m_watcher->addPath(m_localModelsPath);
    connect(m_watcher, &QFileSystemWatcher::directoryChanged, this, &ModelList::updateModelsFromDirectory);
+    connect(this, &ModelList::localModelsPathChanged, this, &ModelList::updateModelList);
    updateModelsFromDirectory();
+    updateModelList();
}

QString ModelList::incompleteDownloadPath(const QString &modelFile)
@@ -516,3 +519,166 @@ void ModelList::updateModelsFromDirectory()
    if (localPath != exePath)
        processDirectory(localPath);
}
+
+void ModelList::updateModelList()
+{
+#if defined(USE_LOCAL_MODELSJSON)
+    QUrl jsonUrl("file://" + QDir::homePath() + "/dev/large_language_models/gpt4all/gpt4all-chat/metadata/models.json");
+#else
+    QUrl jsonUrl("http://gpt4all.io/models/models.json");
+#endif
+    QNetworkRequest request(jsonUrl);
+    QSslConfiguration conf = request.sslConfiguration();
+    conf.setPeerVerifyMode(QSslSocket::VerifyNone);
+    request.setSslConfiguration(conf);
+    QNetworkReply *jsonReply = m_networkManager.get(request);
+    QEventLoop loop;
+    connect(jsonReply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
+    QTimer::singleShot(1500, &loop, &QEventLoop::quit);
+    loop.exec();
+    if (jsonReply->error() == QNetworkReply::NoError && jsonReply->isFinished()) {
+        QByteArray jsonData = jsonReply->readAll();
+        jsonReply->deleteLater();
+        parseModelsJsonFile(jsonData);
+    } else {
+        qWarning() << "Could not download models.json";
+    }
+    delete jsonReply;
+}
+
+static bool operator==(const ModelInfo& lhs, const ModelInfo& rhs) {
+    return lhs.filename == rhs.filename && lhs.md5sum == rhs.md5sum;
+}
+
+static bool compareVersions(const QString &a, const QString &b) {
+    QStringList aParts = a.split('.');
+    QStringList bParts = b.split('.');
+
+    for (int i = 0; i < std::min(aParts.size(), bParts.size()); ++i) {
+        int aInt = aParts[i].toInt();
+        int bInt = bParts[i].toInt();
+
+        if (aInt > bInt) {
+            return true;
+        } else if (aInt < bInt) {
+            return false;
+        }
+    }
+
+    return aParts.size() > bParts.size();
+}
+
+void ModelList::parseModelsJsonFile(const QByteArray &jsonData)
+{
+    QJsonParseError err;
+    QJsonDocument document = QJsonDocument::fromJson(jsonData, &err);
+    if (err.error != QJsonParseError::NoError) {
+        qWarning() << "ERROR: Couldn't parse: " << jsonData << err.errorString();
+        return;
+    }
+
+    QJsonArray jsonArray = document.array();
+    const QString currentVersion = QCoreApplication::applicationVersion();
+
+    for (const QJsonValue &value : jsonArray) {
+        QJsonObject obj = value.toObject();
+
+        QString modelName = obj["name"].toString();
+        QString modelFilename = obj["filename"].toString();
+        QString modelFilesize = obj["filesize"].toString();
+        QString requiresVersion = obj["requires"].toString();
+        QString deprecatedVersion = obj["deprecated"].toString();
+        QString url = obj["url"].toString();
+        QByteArray modelMd5sum = obj["md5sum"].toString().toLatin1().constData();
+        bool isDefault = obj.contains("isDefault") && obj["isDefault"] == QString("true");
+        bool disableGUI = obj.contains("disableGUI") && obj["disableGUI"] == QString("true");
+        QString description = obj["description"].toString();
+        QString order = obj["order"].toString();
+        int ramrequired = obj["ramrequired"].toString().toInt();
+        QString parameters = obj["parameters"].toString();
+        QString quant = obj["quant"].toString();
+        QString type = obj["type"].toString();
+
+        // If the currentVersion version is strictly less than required version, then continue
+        if (!requiresVersion.isEmpty()
+            && requiresVersion != currentVersion
+            && compareVersions(requiresVersion, currentVersion)) {
+            continue;
+        }
+
+        // If the current version is strictly greater than the deprecated version, then continue
+        if (!deprecatedVersion.isEmpty()
+            && compareVersions(currentVersion, deprecatedVersion)) {
+            continue;
+        }
+
+        modelFilesize = ModelList::toFileSize(modelFilesize.toULongLong());
+
+        if (!contains(modelFilename))
+            addModel(modelFilename);
+
+        if (!modelName.isEmpty())
+            updateData(modelFilename, ModelList::NameRole, modelName);
+        updateData(modelFilename, ModelList::FilesizeRole, modelFilesize);
+        updateData(modelFilename, ModelList::Md5sumRole, modelMd5sum);
+        updateData(modelFilename, ModelList::DefaultRole, isDefault);
+        updateData(modelFilename, ModelList::DescriptionRole, description);
+        updateData(modelFilename, ModelList::RequiresVersionRole, requiresVersion);
+        updateData(modelFilename, ModelList::DeprecatedVersionRole, deprecatedVersion);
+        updateData(modelFilename, ModelList::UrlRole, url);
+        updateData(modelFilename, ModelList::DisableGUIRole, disableGUI);
+        updateData(modelFilename, ModelList::OrderRole, order);
+        updateData(modelFilename, ModelList::RamrequiredRole, ramrequired);
+        updateData(modelFilename, ModelList::ParametersRole, parameters);
+        updateData(modelFilename, ModelList::QuantRole, quant);
+        updateData(modelFilename, ModelList::TypeRole, type);
+    }
+
+    const QString chatGPTDesc = tr("<ul><li>Requires personal OpenAI API key.</li><li>WARNING: Will send"
+        " your chats to OpenAI!</li><li>Your API key will be stored on disk</li><li>Will only be used"
+        " to communicate with OpenAI</li><li>You can apply for an API key"
+        " <a href=\"https://platform.openai.com/account/api-keys\">here.</a></li>");
+
+    {
+        const QString modelFilename = "chatgpt-gpt-3.5-turbo.txt";
+        if (!contains(modelFilename))
+            addModel(modelFilename);
+        updateData(modelFilename, ModelList::NameRole, "ChatGPT-3.5 Turbo");
+        updateData(modelFilename, ModelList::FilesizeRole, "minimal");
+        updateData(modelFilename, ModelList::ChatGPTRole, true);
+        updateData(modelFilename, ModelList::DescriptionRole,
+            tr("<strong>OpenAI's ChatGPT model GPT-3.5 Turbo</strong><br>") + chatGPTDesc);
+        updateData(modelFilename, ModelList::RequiresVersionRole, "2.4.2");
+        updateData(modelFilename, ModelList::OrderRole, "ca");
+        updateData(modelFilename, ModelList::RamrequiredRole, 0);
+        updateData(modelFilename, ModelList::ParametersRole, "?");
+        updateData(modelFilename, ModelList::QuantRole, "NA");
+        updateData(modelFilename, ModelList::TypeRole, "GPT");
+    }
+
+    {
+        const QString modelFilename = "chatgpt-gpt-4.txt";
+        if (!contains(modelFilename))
+            addModel(modelFilename);
+        updateData(modelFilename, ModelList::NameRole, "ChatGPT-4");
+        updateData(modelFilename, ModelList::FilesizeRole, "minimal");
+        updateData(modelFilename, ModelList::ChatGPTRole, true);
+        updateData(modelFilename, ModelList::DescriptionRole,
+            tr("<strong>OpenAI's ChatGPT model GPT-4</strong><br>") + chatGPTDesc);
+        updateData(modelFilename, ModelList::RequiresVersionRole, "2.4.2");
+        updateData(modelFilename, ModelList::OrderRole, "cb");
+        updateData(modelFilename, ModelList::RamrequiredRole, 0);
+        updateData(modelFilename, ModelList::ParametersRole, "?");
+        updateData(modelFilename, ModelList::QuantRole, "NA");
+        updateData(modelFilename, ModelList::TypeRole, "GPT");
+    }
+
+    if (installedModels()->count()) {
+        const QString firstModel =
+            installedModels()->firstFilename();
+        QSettings settings;
+        settings.sync();
+        settings.setValue("defaultModel", firstModel);
+        settings.sync();
+    }
+}
--- a/gpt4all-chat/modellist.h
+++ b/gpt4all-chat/modellist.h
@@ -116,7 +116,6 @@ class ModelList : public QAbstractListModel
    Q_PROPERTY(InstalledModels* installedModels READ installedModels NOTIFY installedModelsChanged)
    Q_PROPERTY(DownloadableModels* downloadableModels READ downloadableModels NOTIFY downloadableModelsChanged)
    Q_PROPERTY(QList<QString> userDefaultModelList READ userDefaultModelList NOTIFY userDefaultModelListChanged)
-    Q_PROPERTY(bool modelHasNames READ modelHasNames NOTIFY modelHasNamesChanged)

public:
    static ModelList *globalInstance();
@@ -219,35 +218,33 @@ public:

    QString incompleteDownloadPath(const QString &modelFile);

-    bool modelHasNames() const { return m_modelHasNames; }
-    void updateModelHasNames() { m_modelHasNames = true; emit modelHasNamesChanged(); }
-
Q_SIGNALS:
    void countChanged();
    void localModelsPathChanged();
    void installedModelsChanged();
    void downloadableModelsChanged();
    void userDefaultModelListChanged();
-    void modelHasNamesChanged();

private Q_SLOTS:
    void updateModelsFromDirectory();
+    void updateModelList();

private:
    QString modelDirPath(const QString &modelName, bool isChatGPT);
    int indexForModel(ModelInfo *model);
    QVariant dataInternal(const ModelInfo *info, int role) const;
    static bool lessThan(const ModelInfo* a, const ModelInfo* b);
+    void parseModelsJsonFile(const QByteArray &jsonData);

private:
    mutable QMutex m_mutex;
+    QNetworkAccessManager m_networkManager;
    InstalledModels *m_installedModels;
    DownloadableModels *m_downloadableModels;
    QList<ModelInfo*> m_models;
    QHash<QString, ModelInfo*> m_modelMap;
    QString m_localModelsPath;
    QFileSystemWatcher *m_watcher;
-    bool m_modelHasNames;

private:
    explicit ModelList();