Mirror of https://github.com/nomic-ai/gpt4all.git (synced 2025-07-19 01:36:37 +00:00)
fix double semicolons
Signed-off-by: Jared Van Bortel <jared@nomic.ai>
parent 158f9b9ee7
commit 6cde7cb671
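For context, a minimal standalone sketch (not taken from the gpt4all sources) of what this commit cleans up: a duplicated semicolon after a statement is parsed as an additional empty statement. It is legal C++ and changes no behavior, but linters and compiler warnings such as Clang's -Wextra-semi-stmt commonly flag it as dead code, which is presumably why it is worth removing.

    // Hypothetical example, not from the repository: ";;" compiles but leaves an
    // empty statement behind that tools like clang -Wextra-semi-stmt will warn about.
    #include <iostream>

    int main()
    {
        std::cout << "loading model" << std::endl;;  // statement + stray empty statement
        std::cout << "loading model" << std::endl;   // cleaned up, identical behavior
        return 0;
    }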
@@ -1659,7 +1659,7 @@ void Database::scanQueue()
     if (info.isPdf()) {
         QPdfDocument doc;
         if (doc.load(document_path) != QPdfDocument::Error::None) {
-            qWarning() << "ERROR: Could not load pdf" << document_id << document_path;;
+            qWarning() << "ERROR: Could not load pdf" << document_id << document_path;
             return updateFolderToIndex(folder_id, countForFolder);
         }
         title = doc.metaData(QPdfDocument::MetaDataField::Title).toString();
@@ -502,7 +502,7 @@ ModelList::ModelList()
     connect(MySettings::globalInstance(), &MySettings::contextLengthChanged, this, &ModelList::updateDataForSettings);
     connect(MySettings::globalInstance(), &MySettings::gpuLayersChanged, this, &ModelList::updateDataForSettings);
     connect(MySettings::globalInstance(), &MySettings::repeatPenaltyChanged, this, &ModelList::updateDataForSettings);
-    connect(MySettings::globalInstance(), &MySettings::repeatPenaltyTokensChanged, this, &ModelList::updateDataForSettings);;
+    connect(MySettings::globalInstance(), &MySettings::repeatPenaltyTokensChanged, this, &ModelList::updateDataForSettings);
     connect(MySettings::globalInstance(), &MySettings::promptTemplateChanged, this, &ModelList::updateDataForSettings);
     connect(MySettings::globalInstance(), &MySettings::systemPromptChanged, this, &ModelList::updateDataForSettings);
     connect(&m_networkManager, &QNetworkAccessManager::sslErrors, this, &ModelList::handleSslErrors);
@@ -2094,7 +2094,7 @@ void ModelList::parseDiscoveryJsonFile(const QByteArray &jsonData)
     emit discoverProgressChanged();
     if (!m_discoverNumberOfResults) {
         m_discoverInProgress = false;
-        emit discoverInProgressChanged();;
+        emit discoverInProgressChanged();
     }
 }
 
@@ -2172,7 +2172,7 @@ void ModelList::handleDiscoveryItemFinished()
     if (discoverProgress() >= 1.0) {
         emit layoutChanged();
         m_discoverInProgress = false;
-        emit discoverInProgressChanged();;
+        emit discoverInProgressChanged();
     }
 
     reply->deleteLater();
@@ -186,7 +186,7 @@ void MySettings::restoreModelDefaults(const ModelInfo &info)
     setModelTemperature(info, info.m_temperature);
     setModelTopP(info, info.m_topP);
     setModelMinP(info, info.m_minP);
-    setModelTopK(info, info.m_topK);;
+    setModelTopK(info, info.m_topK);
     setModelMaxLength(info, info.m_maxLength);
     setModelPromptBatchSize(info, info.m_promptBatchSize);
     setModelContextLength(info, info.m_contextLength);