Consolidate generation and application settings on the new settings object.

This commit is contained in:
Adam Treat 2023-06-28 16:05:35 -04:00 committed by AT
parent 7f66c28649
commit 285aa50b60
14 changed files with 57 additions and 207 deletions

View File

@ -1,6 +1,6 @@
#include "chat.h"
#include "chatlistmodel.h"
#include "llm.h"
#include "mysettings.h"
#include "modellist.h"
#include "network.h"
#include "server.h"
@ -116,6 +116,10 @@ void Chat::prompt(const QString &prompt, const QString &prompt_template, int32_t
int32_t repeat_penalty_tokens)
{
resetResponseState();
int threadCount = MySettings::globalInstance()->threadCount();
if (threadCount <= 0)
threadCount = std::min(4, (int32_t) std::thread::hardware_concurrency());
emit promptRequested(
m_collections,
prompt,
@ -127,7 +131,7 @@ void Chat::prompt(const QString &prompt, const QString &prompt_template, int32_t
n_batch,
repeat_penalty,
repeat_penalty_tokens,
LLM::globalInstance()->threadCount());
threadCount);
}
void Chat::regenerateResponse()

View File

@ -1,5 +1,5 @@
#include "chatlistmodel.h"
#include "llm.h"
#include "mysettings.h"
#include <QFile>
#include <QDataStream>
@ -20,7 +20,6 @@ ChatListModel::ChatListModel()
, m_dummyChat(nullptr)
, m_serverChat(nullptr)
, m_currentChat(nullptr)
, m_shouldSaveChats(false)
{
addDummyChat();
@ -29,38 +28,15 @@ ChatListModel::ChatListModel()
connect(thread, &ChatsRestoreThread::finished, this, &ChatListModel::chatsRestoredFinished);
connect(thread, &ChatsRestoreThread::finished, thread, &QObject::deleteLater);
thread->start();
}
bool ChatListModel::shouldSaveChats() const
{
return m_shouldSaveChats;
}
connect(MySettings::globalInstance(), &MySettings::serverChatChanged, this, &ChatListModel::handleServerEnabledChanged);
void ChatListModel::setShouldSaveChats(bool b)
{
if (m_shouldSaveChats == b)
return;
m_shouldSaveChats = b;
emit shouldSaveChatsChanged();
}
bool ChatListModel::shouldSaveChatGPTChats() const
{
return m_shouldSaveChatGPTChats;
}
void ChatListModel::setShouldSaveChatGPTChats(bool b)
{
if (m_shouldSaveChatGPTChats == b)
return;
m_shouldSaveChatGPTChats = b;
emit shouldSaveChatGPTChatsChanged();
}
void ChatListModel::removeChatFile(Chat *chat) const
{
Q_ASSERT(chat != m_serverChat);
const QString savePath = ModelList::globalInstance()->localModelsPath();
const QString savePath = MySettings::globalInstance()->modelPath();
QFile file(savePath + "/gpt4all-" + chat->id() + ".chat");
if (!file.exists())
return;
@ -78,15 +54,15 @@ ChatSaver::ChatSaver()
void ChatListModel::saveChats()
{
const QString savePath = ModelList::globalInstance()->localModelsPath();
const QString savePath = MySettings::globalInstance()->modelPath();
QVector<Chat*> toSave;
for (Chat *chat : m_chats) {
if (chat == m_serverChat)
continue;
const bool isChatGPT = chat->modelInfo().isChatGPT;
if (!isChatGPT && !m_shouldSaveChats)
if (!isChatGPT && !MySettings::globalInstance()->saveChats())
continue;
if (isChatGPT && !m_shouldSaveChatGPTChats)
if (isChatGPT && !MySettings::globalInstance()->saveChatGPTChats())
continue;
toSave.append(chat);
}
@ -105,7 +81,7 @@ void ChatSaver::saveChats(const QVector<Chat *> &chats)
{
QElapsedTimer timer;
timer.start();
const QString savePath = ModelList::globalInstance()->localModelsPath();
const QString savePath = MySettings::globalInstance()->modelPath();
for (Chat *chat : chats) {
QString fileName = "gpt4all-" + chat->id() + ".chat";
QFile file(savePath + "/" + fileName);
@ -168,7 +144,7 @@ void ChatsRestoreThread::run()
}
}
{
const QString savePath = ModelList::globalInstance()->localModelsPath();
const QString savePath = MySettings::globalInstance()->modelPath();
QDir dir(savePath);
dir.setNameFilters(QStringList() << "gpt4all-*.chat");
QStringList fileNames = dir.entryList();
@ -300,7 +276,7 @@ void ChatListModel::chatsRestoredFinished()
void ChatListModel::handleServerEnabledChanged()
{
if (LLM::globalInstance()->serverEnabled() || m_serverChat != m_currentChat)
if (MySettings::globalInstance()->serverChat() || m_serverChat != m_currentChat)
return;
Chat *nextChat = get(0);

View File

@ -36,8 +36,6 @@ class ChatListModel : public QAbstractListModel
Q_OBJECT
Q_PROPERTY(int count READ count NOTIFY countChanged)
Q_PROPERTY(Chat *currentChat READ currentChat WRITE setCurrentChat NOTIFY currentChatChanged)
Q_PROPERTY(bool shouldSaveChats READ shouldSaveChats WRITE setShouldSaveChats NOTIFY shouldSaveChatsChanged)
Q_PROPERTY(bool shouldSaveChatGPTChats READ shouldSaveChatGPTChats WRITE setShouldSaveChatGPTChats NOTIFY shouldSaveChatGPTChatsChanged)
public:
static ChatListModel *globalInstance();
@ -217,8 +215,6 @@ public Q_SLOTS:
Q_SIGNALS:
void countChanged();
void currentChatChanged();
void shouldSaveChatsChanged();
void shouldSaveChatGPTChatsChanged();
void chatsSavedFinished();
void requestSaveChats(const QVector<Chat*> &);
void saveChatsFinished();
@ -255,8 +251,6 @@ private Q_SLOTS:
}
private:
bool m_shouldSaveChats;
bool m_shouldSaveChatGPTChats;
Chat* m_newChat;
Chat* m_dummyChat;
Chat* m_serverChat;

View File

@ -1,5 +1,5 @@
#include "database.h"
#include "modellist.h"
#include "mysettings.h"
#include <QTimer>
#include <QPdfDocument>
@ -415,7 +415,7 @@ bool selectDocuments(QSqlQuery &q, int folder_id, QList<int> *documentIds) {
QSqlError initDb()
{
QString dbPath = ModelList::globalInstance()->localModelsPath()
QString dbPath = MySettings::globalInstance()->modelPath()
+ QString("localdocs_v%1.db").arg(LOCALDOCS_VERSION);
QSqlDatabase db = QSqlDatabase::addDatabase("QSQLITE");
db.setDatabaseName(dbPath);

View File

@ -1,6 +1,7 @@
#include "download.h"
#include "network.h"
#include "modellist.h"
#include "mysettings.h"
#include <QCoreApplication>
#include <QNetworkRequest>
@ -174,7 +175,7 @@ void Download::installModel(const QString &modelFile, const QString &apiKey)
return;
Network::globalInstance()->sendInstallModel(modelFile);
QString filePath = ModelList::globalInstance()->localModelsPath() + modelFile + ".txt";
QString filePath = MySettings::globalInstance()->modelPath() + modelFile + ".txt";
QFile file(filePath);
if (file.open(QIODeviceBase::WriteOnly | QIODeviceBase::Text)) {
QTextStream stream(&file);
@ -185,7 +186,7 @@ void Download::installModel(const QString &modelFile, const QString &apiKey)
void Download::removeModel(const QString &modelFile)
{
const QString filePath = ModelList::globalInstance()->localModelsPath() + modelFile;
const QString filePath = MySettings::globalInstance()->modelPath() + modelFile;
QFile incompleteFile(ModelList::globalInstance()->incompleteDownloadPath(modelFile));
if (incompleteFile.exists()) {
incompleteFile.remove();
@ -420,7 +421,7 @@ void Download::handleModelDownloadFinished()
// Notify that we are calculating hash
ModelList::globalInstance()->updateData(modelFilename, ModelList::CalcHashRole, true);
QByteArray md5sum = ModelList::globalInstance()->modelInfo(modelFilename).md5sum;
const QString saveFilePath = ModelList::globalInstance()->localModelsPath() + modelFilename;
const QString saveFilePath = MySettings::globalInstance()->modelPath() + modelFilename;
emit requestHashAndSave(md5sum, saveFilePath, tempFile, modelReply);
}

View File

@ -1,6 +1,5 @@
#include "llm.h"
#include "../gpt4all-backend/sysinfo.h"
#include "config.h"
#include "chatlistmodel.h"
#include "../gpt4all-backend/llmodel.h"
#include "network.h"
@ -22,8 +21,6 @@ LLM *LLM::globalInstance()
LLM::LLM()
: QObject{nullptr}
, m_threadCount(std::min(4, (int32_t) std::thread::hardware_concurrency()))
, m_serverEnabled(false)
, m_compatHardware(true)
{
QString llmodelSearchPaths = QCoreApplication::applicationDirPath();
@ -39,8 +36,6 @@ LLM::LLM()
llmodelSearchPaths += ";" + frameworksDir;
#endif
LLModel::setImplementationsSearchPath(llmodelSearchPaths.toStdString());
connect(this, &LLM::serverEnabledChanged,
ChatListModel::globalInstance(), &ChatListModel::handleServerEnabledChanged);
#if defined(__x86_64__)
#ifndef _MSC_VER
@ -105,29 +100,3 @@ QString LLM::systemTotalRAMInGBString() const
{
return QString::fromStdString(getSystemTotalRAMInGBString());
}
int32_t LLM::threadCount() const
{
return m_threadCount;
}
void LLM::setThreadCount(int32_t n_threads)
{
if (n_threads <= 0)
n_threads = std::min(4, (int32_t) std::thread::hardware_concurrency());
m_threadCount = n_threads;
emit threadCountChanged();
}
bool LLM::serverEnabled() const
{
return m_serverEnabled;
}
void LLM::setServerEnabled(bool enabled)
{
if (m_serverEnabled == enabled)
return;
m_serverEnabled = enabled;
emit serverEnabledChanged();
}

View File

@ -6,19 +6,11 @@
class LLM : public QObject
{
Q_OBJECT
Q_PROPERTY(int32_t threadCount READ threadCount WRITE setThreadCount NOTIFY threadCountChanged)
Q_PROPERTY(bool serverEnabled READ serverEnabled WRITE setServerEnabled NOTIFY serverEnabledChanged)
Q_PROPERTY(bool compatHardware READ compatHardware NOTIFY compatHardwareChanged)
public:
static LLM *globalInstance();
// FIXME: Move all settings to the new settings singleton
int32_t threadCount() const;
void setThreadCount(int32_t n_threads);
bool serverEnabled() const;
void setServerEnabled(bool enabled);
bool compatHardware() const { return m_compatHardware; }
Q_INVOKABLE bool checkForUpdates() const;
@ -30,13 +22,9 @@ public:
Q_SIGNALS:
void chatListModelChanged();
void modelListChanged();
void threadCountChanged();
void serverEnabledChanged();
void compatHardwareChanged();
private:
int32_t m_threadCount;
bool m_serverEnabled;
bool m_compatHardware;
private:

View File

@ -1,4 +1,5 @@
#include "modellist.h"
#include "mysettings.h"
#include <algorithm>
@ -93,19 +94,18 @@ ModelList::ModelList()
m_watcher = new QFileSystemWatcher(this);
QSettings settings;
settings.sync();
m_localModelsPath = settings.value("modelPath", defaultLocalModelsPath()).toString();
const QString exePath = QCoreApplication::applicationDirPath() + QDir::separator();
m_watcher->addPath(exePath);
m_watcher->addPath(m_localModelsPath);
m_watcher->addPath(MySettings::globalInstance()->modelPath());
connect(m_watcher, &QFileSystemWatcher::directoryChanged, this, &ModelList::updateModelsFromDirectory);
connect(this, &ModelList::localModelsPathChanged, this, &ModelList::updateModelList);
connect(MySettings::globalInstance(), &MySettings::modelPathChanged, this, &ModelList::updateModelList);
updateModelsFromDirectory();
updateModelList();
}
QString ModelList::incompleteDownloadPath(const QString &modelFile)
{
return localModelsPath() + "incomplete-" + modelFile;
return MySettings::globalInstance()->modelPath() + "incomplete-" + modelFile;
}
const QList<ModelInfo> ModelList::exportModelList() const
@ -415,49 +415,6 @@ ModelInfo ModelList::modelInfo(const QString &filename) const
return *m_modelMap.value(filename);
}
QString ModelList::defaultLocalModelsPath() const
{
QString localPath = QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation)
+ "/";
QString testWritePath = localPath + QString("test_write.txt");
QString canonicalLocalPath = QFileInfo(localPath).canonicalFilePath() + "/";
QDir localDir(localPath);
if (!localDir.exists()) {
if (!localDir.mkpath(localPath)) {
qWarning() << "ERROR: Local download directory can't be created:" << canonicalLocalPath;
return canonicalLocalPath;
}
}
if (QFileInfo::exists(testWritePath))
return canonicalLocalPath;
QFile testWriteFile(testWritePath);
if (testWriteFile.open(QIODeviceBase::ReadWrite)) {
testWriteFile.close();
return canonicalLocalPath;
}
qWarning() << "ERROR: Local download path appears not writeable:" << canonicalLocalPath;
return canonicalLocalPath;
}
QString ModelList::localModelsPath() const
{
return m_localModelsPath;
}
void ModelList::setLocalModelsPath(const QString &modelPath)
{
QString filePath = (modelPath.startsWith("file://") ?
QUrl(modelPath).toLocalFile() : modelPath);
QString canonical = QFileInfo(filePath).canonicalFilePath() + "/";
if (m_localModelsPath != canonical) {
m_localModelsPath = canonical;
emit localModelsPathChanged();
}
}
QString ModelList::modelDirPath(const QString &modelName, bool isChatGPT)
{
QVector<QString> possibleFilePaths;
@ -473,10 +430,10 @@ QString ModelList::modelDirPath(const QString &modelName, bool isChatGPT)
if (infoAppPath.exists())
return QCoreApplication::applicationDirPath();
QString downloadPath = localModelsPath() + modelFilename;
QString downloadPath = MySettings::globalInstance()->modelPath() + modelFilename;
QFileInfo infoLocalPath(downloadPath);
if (infoLocalPath.exists())
return localModelsPath();
return MySettings::globalInstance()->modelPath();
}
return QString();
}
@ -484,7 +441,7 @@ QString ModelList::modelDirPath(const QString &modelName, bool isChatGPT)
void ModelList::updateModelsFromDirectory()
{
const QString exePath = QCoreApplication::applicationDirPath() + QDir::separator();
const QString localPath = localModelsPath();
const QString localPath = MySettings::globalInstance()->modelPath();
auto processDirectory = [&](const QString& path) {
QDirIterator it(path, QDirIterator::Subdirectories);

View File

@ -112,7 +112,6 @@ class ModelList : public QAbstractListModel
{
Q_OBJECT
Q_PROPERTY(int count READ count NOTIFY countChanged)
Q_PROPERTY(QString localModelsPath READ localModelsPath WRITE setLocalModelsPath NOTIFY localModelsPathChanged)
Q_PROPERTY(InstalledModels* installedModels READ installedModels NOTIFY installedModelsChanged)
Q_PROPERTY(DownloadableModels* downloadableModels READ downloadableModels NOTIFY downloadableModelsChanged)
Q_PROPERTY(QList<QString> userDefaultModelList READ userDefaultModelList NOTIFY userDefaultModelListChanged)
@ -194,10 +193,6 @@ public:
void addModel(const QString &filename);
Q_INVOKABLE QString defaultLocalModelsPath() const;
Q_INVOKABLE QString localModelsPath() const;
Q_INVOKABLE void setLocalModelsPath(const QString &modelPath);
const QList<ModelInfo> exportModelList() const;
const QList<QString> userDefaultModelList() const;
@ -220,7 +215,6 @@ public:
Q_SIGNALS:
void countChanged();
void localModelsPathChanged();
void installedModelsChanged();
void downloadableModelsChanged();
void userDefaultModelListChanged();
@ -243,7 +237,6 @@ private:
DownloadableModels *m_downloadableModels;
QList<ModelInfo*> m_models;
QHash<QString, ModelInfo*> m_modelMap;
QString m_localModelsPath;
QFileSystemWatcher *m_watcher;
private:

View File

@ -5,6 +5,7 @@
#include <QFileInfo>
#include <QSettings>
#include <QStandardPaths>
#include <QUrl>
static double default_temperature = 0.7;
static double default_topP = 0.1;
@ -309,11 +310,13 @@ QString MySettings::modelPath() const
void MySettings::setModelPath(const QString &p)
{
if (modelPath() == p)
QString filePath = (p.startsWith("file://") ?
QUrl(p).toLocalFile() : p);
QString canonical = QFileInfo(filePath).canonicalFilePath() + "/";
if (modelPath() == canonical)
return;
QSettings setting;
setting.setValue("modelPath", p);
setting.setValue("modelPath", canonical);
setting.sync();
emit modelPathChanged();
}

View File

@ -7,6 +7,7 @@ import chatlistmodel
import llm
import download
import network
import mysettings
Drawer {
id: chatDrawer
@ -80,7 +81,7 @@ Drawer {
property bool isCurrent: ChatListModel.currentChat === ChatListModel.get(index)
property bool isServer: ChatListModel.get(index) && ChatListModel.get(index).isServer
property bool trashQuestionDisplayed: false
visible: !isServer || LLM.serverEnabled
visible: !isServer || MySettings.serverChat
z: isCurrent ? 199 : 1
color: isServer ? theme.backgroundDarkest : (index % 2 === 0 ? theme.backgroundLight : theme.backgroundLighter)
border.width: isCurrent

View File

@ -9,6 +9,7 @@ import download
import llm
import modellist
import network
import mysettings
Dialog {
id: modelDownloaderDialog
@ -29,21 +30,6 @@ Dialog {
Network.sendModelDownloaderDialog();
}
property string defaultModelPath: ModelList.defaultLocalModelsPath()
property alias modelPath: settings.modelPath
Settings {
id: settings
property string modelPath: modelDownloaderDialog.defaultModelPath
}
Component.onCompleted: {
ModelList.localModelsPath = settings.modelPath
}
Component.onDestruction: {
settings.sync()
}
PopupDialog {
id: downloadingErrorPopup
anchors.centerIn: parent
@ -416,12 +402,9 @@ Dialog {
FolderDialog {
id: modelPathDialog
title: "Please choose a directory"
currentFolder: "file://" + ModelList.localModelsPath
currentFolder: "file://" + MySettings.modelPath
onAccepted: {
modelPathDisplayField.text = selectedFolder
ModelList.localModelsPath = modelPathDisplayField.text
settings.modelPath = ModelList.localModelsPath
settings.sync()
MySettings.modelPath = selectedFolder
}
}
Label {
@ -433,7 +416,7 @@ Dialog {
}
MyDirectoryField {
id: modelPathDisplayField
text: ModelList.localModelsPath
text: MySettings.modelPath
Layout.fillWidth: true
ToolTip.text: qsTr("Path where model files will be downloaded to")
ToolTip.visible: hovered
@ -442,11 +425,9 @@ Dialog {
Accessible.description: ToolTip.text
onEditingFinished: {
if (isValid) {
ModelList.localModelsPath = modelPathDisplayField.text
settings.modelPath = ModelList.localModelsPath
settings.sync()
MySettings.modelPath = modelPathDisplayField.text
} else {
text = ModelList.localModelsPath
text = MySettings.modelPath
}
}
}

View File

@ -43,20 +43,6 @@ Dialog {
function restoreApplicationDefaults() {
MySettings.restoreApplicationDefaults();
ModelList.localModelsPath = MySettings.modelPath
LLM.threadCount = MySettings.threadCount
LLM.serverEnabled = MySettings.serverChat
ChatListModel.shouldSaveChats = MySettings.saveChats
ChatListModel.shouldSaveChatGPTChats = MySettings.saveChatGPTChats
MySettings.forceMetal = false
}
Component.onCompleted: {
LLM.threadCount = MySettings.threadCount
LLM.serverEnabled = MySettings.serverChat
ChatListModel.shouldSaveChats = MySettings.saveChats
ChatListModel.shouldSaveChatGPTChats = MySettings.saveChatGPTChats
ModelList.localModelsPath = MySettings.modelPath
}
Item {
@ -572,11 +558,9 @@ Dialog {
FolderDialog {
id: modelPathDialog
title: "Please choose a directory"
currentFolder: "file://" + ModelList.localModelsPath
currentFolder: "file://" + MySettings.modelPath
onAccepted: {
modelPathDisplayField.text = selectedFolder
ModelList.localModelsPath = modelPathDisplayField.text
MySettings.modelPath = ModelList.localModelsPath
MySettings.modelPath = selectedFolder
}
}
Label {
@ -588,7 +572,7 @@ Dialog {
}
MyDirectoryField {
id: modelPathDisplayField
text: ModelList.localModelsPath
text: MySettings.modelPath
implicitWidth: 300
Layout.row: 2
Layout.column: 1
@ -600,10 +584,9 @@ Dialog {
Accessible.description: ToolTip.text
onEditingFinished: {
if (isValid) {
ModelList.localModelsPath = modelPathDisplayField.text
MySettings.modelPath = ModelList.localModelsPath
MySettings.modelPath = modelPathDisplayField.text
} else {
text = ModelList.localModelsPath
text = MySettings.modelPath
}
}
}
@ -635,7 +618,6 @@ Dialog {
var val = parseInt(text)
if (!isNaN(val)) {
MySettings.threadCount = val
LLM.threadCount = val
focus = false
} else {
text = MySettings.threadCount
@ -660,7 +642,6 @@ Dialog {
onClicked: {
Network.sendSaveChatsToggled(saveChatsBox.checked);
MySettings.saveChats = !MySettings.saveChats
ChatListModel.shouldSaveChats = saveChatsBox.checked
}
ToolTip.text: qsTr("WARNING: Saving chats to disk can be ~2GB per chat")
ToolTip.visible: hovered
@ -679,7 +660,6 @@ Dialog {
checked: MySettings.saveChatGPTChats
onClicked: {
MySettings.saveChatGPTChats = !MySettings.saveChatGPTChats
ChatListModel.shouldSaveChatGPTChats = saveChatGPTChatsBox.checked
}
}
Label {
@ -696,7 +676,6 @@ Dialog {
checked: MySettings.serverChat
onClicked: {
MySettings.serverChat = !MySettings.serverChat
LLM.serverEnabled = serverChatBox.checked
}
ToolTip.text: qsTr("WARNING: This enables the gui to act as a local REST web server(OpenAI API compliant) for API requests and will increase your RAM usage as well")
ToolTip.visible: hovered

View File

@ -1,6 +1,6 @@
#include "server.h"
#include "chat.h"
#include "llm.h"
#include "mysettings.h"
#include "modellist.h"
#include <QJsonDocument>
@ -78,7 +78,7 @@ void Server::start()
m_server->route("/v1/models", QHttpServerRequest::Method::Get,
[](const QHttpServerRequest &request) {
if (!LLM::globalInstance()->serverEnabled())
if (!MySettings::globalInstance()->serverChat())
return QHttpServerResponse(QHttpServerResponder::StatusCode::Unauthorized);
const QList<ModelInfo> modelList = ModelList::globalInstance()->exportModelList();
@ -97,7 +97,7 @@ void Server::start()
m_server->route("/v1/models/<arg>", QHttpServerRequest::Method::Get,
[](const QString &model, const QHttpServerRequest &request) {
if (!LLM::globalInstance()->serverEnabled())
if (!MySettings::globalInstance()->serverChat())
return QHttpServerResponse(QHttpServerResponder::StatusCode::Unauthorized);
const QList<ModelInfo> modelList = ModelList::globalInstance()->exportModelList();
@ -117,7 +117,7 @@ void Server::start()
m_server->route("/v1/completions", QHttpServerRequest::Method::Post,
[this](const QHttpServerRequest &request) {
if (!LLM::globalInstance()->serverEnabled())
if (!MySettings::globalInstance()->serverChat())
return QHttpServerResponse(QHttpServerResponder::StatusCode::Unauthorized);
return handleCompletionRequest(request, false);
}
@ -125,7 +125,7 @@ void Server::start()
m_server->route("/v1/chat/completions", QHttpServerRequest::Method::Post,
[this](const QHttpServerRequest &request) {
if (!LLM::globalInstance()->serverEnabled())
if (!MySettings::globalInstance()->serverChat())
return QHttpServerResponse(QHttpServerResponder::StatusCode::Unauthorized);
return handleCompletionRequest(request, true);
}
@ -303,6 +303,10 @@ QHttpServerResponse Server::handleCompletionRequest(const QHttpServerRequest &re
const float repeat_penalty = settings.value("repeatPenalty", m_ctx.repeat_penalty).toDouble();
const int repeat_last_n = settings.value("repeatPenaltyTokens", m_ctx.repeat_last_n).toInt();
int threadCount = MySettings::globalInstance()->threadCount();
if (threadCount <= 0)
threadCount = std::min(4, (int32_t) std::thread::hardware_concurrency());
int promptTokens = 0;
int responseTokens = 0;
QList<QPair<QString, QList<ResultInfo>>> responses;
@ -318,7 +322,7 @@ QHttpServerResponse Server::handleCompletionRequest(const QHttpServerRequest &re
n_batch,
repeat_penalty,
repeat_last_n,
LLM::globalInstance()->threadCount())) {
threadCount)) {
std::cerr << "ERROR: couldn't prompt model " << modelInfo.name.toStdString() << std::endl;
return QHttpServerResponse(QHttpServerResponder::StatusCode::InternalServerError);