diff --git a/gpt4all-chat/src/database.cpp b/gpt4all-chat/src/database.cpp
index 9b1e9ecd..f89d3378 100644
--- a/gpt4all-chat/src/database.cpp
+++ b/gpt4all-chat/src/database.cpp
@@ -1659,7 +1659,7 @@ void Database::scanQueue()
     if (info.isPdf()) {
         QPdfDocument doc;
         if (doc.load(document_path) != QPdfDocument::Error::None) {
-            qWarning() << "ERROR: Could not load pdf" << document_id << document_path;;
+            qWarning() << "ERROR: Could not load pdf" << document_id << document_path;
             return updateFolderToIndex(folder_id, countForFolder);
         }
         title = doc.metaData(QPdfDocument::MetaDataField::Title).toString();
diff --git a/gpt4all-chat/src/modellist.cpp b/gpt4all-chat/src/modellist.cpp
index db74209e..d37894ac 100644
--- a/gpt4all-chat/src/modellist.cpp
+++ b/gpt4all-chat/src/modellist.cpp
@@ -502,7 +502,7 @@ ModelList::ModelList()
     connect(MySettings::globalInstance(), &MySettings::contextLengthChanged, this, &ModelList::updateDataForSettings);
     connect(MySettings::globalInstance(), &MySettings::gpuLayersChanged, this, &ModelList::updateDataForSettings);
     connect(MySettings::globalInstance(), &MySettings::repeatPenaltyChanged, this, &ModelList::updateDataForSettings);
-    connect(MySettings::globalInstance(), &MySettings::repeatPenaltyTokensChanged, this, &ModelList::updateDataForSettings);;
+    connect(MySettings::globalInstance(), &MySettings::repeatPenaltyTokensChanged, this, &ModelList::updateDataForSettings);
     connect(MySettings::globalInstance(), &MySettings::promptTemplateChanged, this, &ModelList::updateDataForSettings);
     connect(MySettings::globalInstance(), &MySettings::systemPromptChanged, this, &ModelList::updateDataForSettings);
     connect(&m_networkManager, &QNetworkAccessManager::sslErrors, this, &ModelList::handleSslErrors);
@@ -2094,7 +2094,7 @@ void ModelList::parseDiscoveryJsonFile(const QByteArray &jsonData)
     emit discoverProgressChanged();
     if (!m_discoverNumberOfResults) {
         m_discoverInProgress = false;
-        emit discoverInProgressChanged();;
+        emit discoverInProgressChanged();
     }
 }
 
@@ -2172,7 +2172,7 @@ void ModelList::handleDiscoveryItemFinished()
     if (discoverProgress() >= 1.0) {
         emit layoutChanged();
         m_discoverInProgress = false;
-        emit discoverInProgressChanged();;
+        emit discoverInProgressChanged();
     }
 
     reply->deleteLater();
diff --git a/gpt4all-chat/src/mysettings.cpp b/gpt4all-chat/src/mysettings.cpp
index 97af196f..38c8ab68 100644
--- a/gpt4all-chat/src/mysettings.cpp
+++ b/gpt4all-chat/src/mysettings.cpp
@@ -186,7 +186,7 @@ void MySettings::restoreModelDefaults(const ModelInfo &info)
     setModelTemperature(info, info.m_temperature);
     setModelTopP(info, info.m_topP);
     setModelMinP(info, info.m_minP);
-    setModelTopK(info, info.m_topK);;
+    setModelTopK(info, info.m_topK);
     setModelMaxLength(info, info.m_maxLength);
     setModelPromptBatchSize(info, info.m_promptBatchSize);
     setModelContextLength(info, info.m_contextLength);