settings: use enums for ChatTheme/FontSize, translate choices (#2667)

Also change SuggestionMode to work the same way.

Signed-off-by: Adam Treat <treat.adam@gmail.com>
Signed-off-by: Jared Van Bortel <jared@nomic.ai>
Co-authored-by: Jared Van Bortel <jared@nomic.ai>
Authored by AT on 2024-07-16 16:12:44 -04:00, committed by GitHub
parent f0c754bece
commit 88a206ab93
GPG Key ID: B5690EEEBB952194
9 changed files with 372 additions and 258 deletions

View File

@@ -67,6 +67,8 @@ int main(int argc, char *argv[])
     qmlRegisterSingletonInstance("download", 1, 0, "Download", Download::globalInstance());
     qmlRegisterSingletonInstance("network", 1, 0, "Network", Network::globalInstance());
     qmlRegisterSingletonInstance("localdocs", 1, 0, "LocalDocs", LocalDocs::globalInstance());
+    qmlRegisterUncreatableMetaObject(MySettingsEnums::staticMetaObject, "mysettingsenums", 1, 0, "MySettingsEnums", "Error: only enums");
     const QUrl url(u"qrc:/gpt4all/main.qml"_qs);
     QObject::connect(&engine, &QQmlApplicationEngine::objectCreated,
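
For context on the new registration: qmlRegisterUncreatableMetaObject is the standard Qt call for exposing the enums of a Q_NAMESPACE to QML, so QML code can refer to values such as MySettingsEnums.ChatTheme.LegacyDark by name. A minimal sketch of the pattern with illustrative names (not the exact gpt4all sources), assuming an ordinary Qt/CMake project where moc processes the header:

// settingsenums.h -- hypothetical header illustrating the pattern
#pragma once
#include <QObject>

namespace SettingsEnums {
Q_NAMESPACE                      // makes the namespace visible to the meta-object system

enum class ChatTheme { Light = 0, Dark = 1, LegacyDark = 2 };
Q_ENUM_NS(ChatTheme)             // registers the enum and its value names with QMetaEnum
} // namespace SettingsEnums

// main.cpp (excerpt)
#include <QtQml/qqml.h>

static void registerSettingsEnums()
{
    // After this call, QML that imports "settingsenums" can write
    // e.g. SettingsEnums.ChatTheme.Dark or compare a property against it.
    qmlRegisterUncreatableMetaObject(SettingsEnums::staticMetaObject,
                                     "settingsenums", 1, 0, "SettingsEnums",
                                     "Error: only enums");
}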

View File

@@ -24,6 +24,11 @@
 using namespace Qt::Literals::StringLiterals;
 
+// used only for settings serialization, do not translate
+static const QStringList suggestionModeNames { "LocalDocsOnly", "On", "Off" };
+static const QStringList chatThemeNames      { "Light", "Dark", "LegacyDark" };
+static const QStringList fontSizeNames       { "Small", "Medium", "Large" };
+
 // FIXME: All of these default strings that are shown in the UI for settings need to be marked as
 // translatable
@@ -39,8 +44,8 @@ static const QString languageAndLocale = "Default";
 } // namespace defaults
 
 static const QVariantMap basicDefaults {
-    { "chatTheme",          "Light" },
-    { "fontSize",           "Small" },
+    { "chatTheme",          QVariant::fromValue(ChatTheme::Light) },
+    { "fontSize",           QVariant::fromValue(FontSize::Small) },
     { "lastVersionStarted", "" },
     { "networkPort",        4891, },
     { "saveChatsContext",   false },
@@ -169,6 +174,12 @@ void MySettings::setBasicSetting(const QString &name, const QVariant &value, std
     QMetaObject::invokeMethod(this, u"%1Changed"_s.arg(signal.value_or(name)).toLatin1().constData());
 }
 
+int MySettings::getEnumSetting(const QString &setting, const QStringList &valueNames) const
+{
+    int idx = valueNames.indexOf(getBasicSetting(setting).toString());
+    return idx != -1 ? idx : *reinterpret_cast<const int *>(basicDefaults.value(setting).constData());
+}
+
 void MySettings::restoreModelDefaults(const ModelInfo &info)
 {
     setModelTemperature(info, info.m_temperature);
@@ -189,8 +200,8 @@ void MySettings::restoreModelDefaults(const ModelInfo &info)
 void MySettings::restoreApplicationDefaults()
 {
-    setChatTheme(basicDefaults.value("chatTheme").toString());
-    setFontSize(basicDefaults.value("fontSize").toString());
+    setChatTheme(basicDefaults.value("chatTheme").value<ChatTheme>());
+    setFontSize(basicDefaults.value("fontSize").value<FontSize>());
     setDevice(defaults::device);
     setThreadCount(defaults::threadCount);
     setSaveChatsContext(basicDefaults.value("saveChatsContext").toBool());
@@ -431,29 +442,28 @@ void MySettings::setThreadCount(int value)
     emit threadCountChanged();
 }
 
 bool MySettings::saveChatsContext() const { return getBasicSetting("saveChatsContext" ).toBool(); }
 bool MySettings::serverChat() const { return getBasicSetting("serverChat" ).toBool(); }
 int MySettings::networkPort() const { return getBasicSetting("networkPort" ).toInt(); }
 QString MySettings::userDefaultModel() const { return getBasicSetting("userDefaultModel" ).toString(); }
-QString MySettings::chatTheme() const { return getBasicSetting("chatTheme" ).toString(); }
-QString MySettings::fontSize() const { return getBasicSetting("fontSize" ).toString(); }
 QString MySettings::lastVersionStarted() const { return getBasicSetting("lastVersionStarted" ).toString(); }
 int MySettings::localDocsChunkSize() const { return getBasicSetting("localdocs/chunkSize" ).toInt(); }
 int MySettings::localDocsRetrievalSize() const { return getBasicSetting("localdocs/retrievalSize" ).toInt(); }
 bool MySettings::localDocsShowReferences() const { return getBasicSetting("localdocs/showReferences").toBool(); }
 QStringList MySettings::localDocsFileExtensions() const { return getBasicSetting("localdocs/fileExtensions").toStringList(); }
 bool MySettings::localDocsUseRemoteEmbed() const { return getBasicSetting("localdocs/useRemoteEmbed").toBool(); }
 QString MySettings::localDocsNomicAPIKey() const { return getBasicSetting("localdocs/nomicAPIKey" ).toString(); }
 QString MySettings::localDocsEmbedDevice() const { return getBasicSetting("localdocs/embedDevice" ).toString(); }
 QString MySettings::networkAttribution() const { return getBasicSetting("network/attribution" ).toString(); }
-SuggestionMode MySettings::suggestionMode() const { return getBasicSetting("suggestionMode").value<SuggestionMode>(); };
+
+ChatTheme MySettings::chatTheme() const { return ChatTheme (getEnumSetting("chatTheme", chatThemeNames)); }
+FontSize MySettings::fontSize() const { return FontSize (getEnumSetting("fontSize", fontSizeNames)); }
+SuggestionMode MySettings::suggestionMode() const { return SuggestionMode(getEnumSetting("suggestionMode", suggestionModeNames)); }
 
 void MySettings::setSaveChatsContext(bool value) { setBasicSetting("saveChatsContext", value); }
 void MySettings::setServerChat(bool value) { setBasicSetting("serverChat", value); }
 void MySettings::setNetworkPort(int value) { setBasicSetting("networkPort", value); }
 void MySettings::setUserDefaultModel(const QString &value) { setBasicSetting("userDefaultModel", value); }
-void MySettings::setChatTheme(const QString &value) { setBasicSetting("chatTheme", value); }
-void MySettings::setFontSize(const QString &value) { setBasicSetting("fontSize", value); }
 void MySettings::setLastVersionStarted(const QString &value) { setBasicSetting("lastVersionStarted", value); }
 void MySettings::setLocalDocsChunkSize(int value) { setBasicSetting("localdocs/chunkSize", value, "localDocsChunkSize"); }
 void MySettings::setLocalDocsRetrievalSize(int value) { setBasicSetting("localdocs/retrievalSize", value, "localDocsRetrievalSize"); }
@@ -463,7 +473,10 @@ void MySettings::setLocalDocsUseRemoteEmbed(bool value) { setBasic
 void MySettings::setLocalDocsNomicAPIKey(const QString &value) { setBasicSetting("localdocs/nomicAPIKey", value, "localDocsNomicAPIKey"); }
 void MySettings::setLocalDocsEmbedDevice(const QString &value) { setBasicSetting("localdocs/embedDevice", value, "localDocsEmbedDevice"); }
 void MySettings::setNetworkAttribution(const QString &value) { setBasicSetting("network/attribution", value, "networkAttribution"); }
-void MySettings::setSuggestionMode(SuggestionMode value) { setBasicSetting("suggestionMode", int(value)); }
+
+void MySettings::setChatTheme(ChatTheme value) { setBasicSetting("chatTheme", chatThemeNames .value(int(value))); }
+void MySettings::setFontSize(FontSize value) { setBasicSetting("fontSize", fontSizeNames .value(int(value))); }
+void MySettings::setSuggestionMode(SuggestionMode value) { setBasicSetting("suggestionMode", suggestionModeNames.value(int(value))); }
 
 QString MySettings::modelPath()
 {
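
Taken together, the setters above serialize an enum as its untranslated English name from the corresponding *Names list, and getEnumSetting maps a stored name back to the enum's integer value, falling back to the compiled-in default when the string is unrecognized. A simplified, self-contained sketch of that round-trip, with illustrative names rather than the actual MySettings class:

#include <QSettings>
#include <QStringList>

enum class ChatTheme { Light = 0, Dark = 1, LegacyDark = 2 };

// Serialization names; order must stay in sync with the enum values.
static const QStringList chatThemeNames { "Light", "Dark", "LegacyDark" };

// Store the untranslated name, never a translated combo-box label.
static void writeChatTheme(QSettings &settings, ChatTheme value)
{
    settings.setValue("chatTheme", chatThemeNames.value(int(value)));
}

// Map the stored name back to an enum value; unknown strings fall back to the default.
static ChatTheme readChatTheme(const QSettings &settings)
{
    int idx = chatThemeNames.indexOf(settings.value("chatTheme").toString());
    return idx != -1 ? ChatTheme(idx) : ChatTheme::Light;
}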

View File

@@ -16,12 +16,29 @@
 namespace MySettingsEnums {
 Q_NAMESPACE
 
+/* NOTE: values of these enums are used as indices for the corresponding combo boxes in
+ * ApplicationSettings.qml, as well as the corresponding name lists in mysettings.cpp */
 enum class SuggestionMode {
     LocalDocsOnly = 0,
     On = 1,
     Off = 2,
 };
 Q_ENUM_NS(SuggestionMode)
+
+enum class ChatTheme {
+    Light = 0,
+    Dark = 1,
+    LegacyDark = 2,
+};
+Q_ENUM_NS(ChatTheme)
+
+enum class FontSize {
+    Small = 0,
+    Medium = 1,
+    Large = 2,
+};
+Q_ENUM_NS(FontSize)
 }
 
 using namespace MySettingsEnums;
@@ -33,10 +50,8 @@ class MySettings : public QObject
     Q_PROPERTY(bool serverChat READ serverChat WRITE setServerChat NOTIFY serverChatChanged)
     Q_PROPERTY(QString modelPath READ modelPath WRITE setModelPath NOTIFY modelPathChanged)
     Q_PROPERTY(QString userDefaultModel READ userDefaultModel WRITE setUserDefaultModel NOTIFY userDefaultModelChanged)
-    // FIXME: This should be changed to an enum to allow translations to work
-    Q_PROPERTY(QString chatTheme READ chatTheme WRITE setChatTheme NOTIFY chatThemeChanged)
-    // FIXME: This should be changed to an enum to allow translations to work
-    Q_PROPERTY(QString fontSize READ fontSize WRITE setFontSize NOTIFY fontSizeChanged)
+    Q_PROPERTY(ChatTheme chatTheme READ chatTheme WRITE setChatTheme NOTIFY chatThemeChanged)
+    Q_PROPERTY(FontSize fontSize READ fontSize WRITE setFontSize NOTIFY fontSizeChanged)
     Q_PROPERTY(QString languageAndLocale READ languageAndLocale WRITE setLanguageAndLocale NOTIFY languageAndLocaleChanged)
     Q_PROPERTY(bool forceMetal READ forceMetal WRITE setForceMetal NOTIFY forceMetalChanged)
     Q_PROPERTY(QString lastVersionStarted READ lastVersionStarted WRITE setLastVersionStarted NOTIFY lastVersionStartedChanged)
@@ -131,10 +146,10 @@ public:
     void setModelPath(const QString &value);
     QString userDefaultModel() const;
    void setUserDefaultModel(const QString &value);
-    QString chatTheme() const;
-    void setChatTheme(const QString &value);
-    QString fontSize() const;
-    void setFontSize(const QString &value);
+    ChatTheme chatTheme() const;
+    void setChatTheme(ChatTheme value);
+    FontSize fontSize() const;
+    void setFontSize(FontSize value);
     bool forceMetal() const;
     void setForceMetal(bool value);
     QString device();
@@ -144,7 +159,7 @@ public:
     int32_t gpuLayers() const;
     void setGpuLayers(int32_t value);
     SuggestionMode suggestionMode() const;
-    void setSuggestionMode(SuggestionMode mode);
+    void setSuggestionMode(SuggestionMode value);
     QString languageAndLocale() const;
     void setLanguageAndLocale(const QString &bcp47Name = QString()); // called on startup with QString()
@@ -239,6 +254,7 @@ private:
     QVariant getBasicSetting(const QString &name) const;
     void setBasicSetting(const QString &name, const QVariant &value, std::optional<QString> signal = std::nullopt);
+    int getEnumSetting(const QString &setting, const QStringList &valueNames) const;
     QVariant getModelSetting(const QString &name, const ModelInfo &info) const;
     void setModelSetting(const QString &name, const ModelInfo &info, const QVariant &value, bool force,
                          bool signal = false);

View File

@@ -107,11 +107,12 @@ MySettingsTab {
     Layout.maximumWidth: 200
     Layout.fillWidth: false
     Layout.alignment: Qt.AlignRight
-    model: [qsTr("Dark"), qsTr("Light"), qsTr("LegacyDark")]
+    // NOTE: indices match values of ChatTheme enum, keep them in sync
+    model: [qsTr("Light"), qsTr("Dark"), qsTr("LegacyDark")]
     Accessible.name: themeLabel.text
     Accessible.description: themeLabel.helpText
     function updateModel() {
-        themeBox.currentIndex = themeBox.indexOfValue(MySettings.chatTheme);
+        themeBox.currentIndex = MySettings.chatTheme;
     }
     Component.onCompleted: {
         themeBox.updateModel()
@@ -123,7 +124,7 @@ MySettingsTab {
         }
     }
     onActivated: {
-        MySettings.chatTheme = themeBox.currentText
+        MySettings.chatTheme = themeBox.currentIndex
     }
 }
 MySettingsLabel {
@@ -141,11 +142,12 @@ MySettingsTab {
     Layout.maximumWidth: 200
     Layout.fillWidth: false
     Layout.alignment: Qt.AlignRight
-    model: ["Small", "Medium", "Large"]
+    // NOTE: indices match values of FontSize enum, keep them in sync
+    model: [qsTr("Small"), qsTr("Medium"), qsTr("Large")]
     Accessible.name: fontLabel.text
     Accessible.description: fontLabel.helpText
     function updateModel() {
-        fontBox.currentIndex = fontBox.indexOfValue(MySettings.fontSize);
+        fontBox.currentIndex = MySettings.fontSize;
     }
     Component.onCompleted: {
         fontBox.updateModel()
@@ -157,7 +159,7 @@ MySettingsTab {
         }
     }
     onActivated: {
-        MySettings.fontSize = fontBox.currentText
+        MySettings.fontSize = fontBox.currentIndex
     }
 }
 MySettingsLabel {
@@ -271,6 +273,7 @@ MySettingsTab {
     Layout.minimumWidth: 400
     Layout.maximumWidth: 400
     Layout.alignment: Qt.AlignRight
+    // NOTE: indices match values of SuggestionMode enum, keep them in sync
     model: [ qsTr("When chatting with LocalDocs"), qsTr("Whenever possible"), qsTr("Never") ]
     Accessible.name: suggestionModeLabel.text
     Accessible.description: suggestionModeLabel.helpText
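
One design note on the QML side: the visible labels are now wrapped in qsTr() so they can be translated, while what gets written back to MySettings is currentIndex rather than currentText, so a translated label can never leak into the stored setting. The hand-written name lists in mysettings.cpp therefore have to stay index-aligned with both the enums and these combo-box models. An alternative worth knowing about (not what this commit does) is deriving the serialization names from QMetaEnum, which Q_ENUM_NS already provides; a hedged sketch, assuming the header is processed by moc in a normal Qt build:

#include <QMetaEnum>
#include <QObject>
#include <QString>

namespace SettingsEnums {
Q_NAMESPACE
enum class FontSize { Small = 0, Medium = 1, Large = 2 };
Q_ENUM_NS(FontSize)
}

// enum -> stored string ("Small", "Medium", "Large"), taken from the enumerator names
static QString fontSizeToName(SettingsEnums::FontSize value)
{
    return QMetaEnum::fromType<SettingsEnums::FontSize>().valueToKey(int(value));
}

// stored string -> enum, falling back to Small when the name is unknown
static SettingsEnums::FontSize fontSizeFromName(const QString &name)
{
    bool ok = false;
    int value = QMetaEnum::fromType<SettingsEnums::FontSize>()
                    .keyToValue(name.toUtf8().constData(), &ok);
    return ok ? SettingsEnums::FontSize(value) : SettingsEnums::FontSize::Small;
}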

View File

@@ -3,6 +3,7 @@ import QtQuick
 import QtQuick.Controls
 import QtQuick.Controls.Basic
 import mysettings
+import mysettingsenums
 
 Button {
     id: myButton
@@ -14,7 +15,7 @@ Button {
     property color backgroundColor: theme.buttonBackground
     property color backgroundColorHovered: theme.buttonBackgroundHovered
     property real backgroundRadius: 10
-    property real borderWidth: MySettings.chatTheme === "LegacyDark" ? 1 : 0
+    property real borderWidth: MySettings.chatTheme === MySettingsEnums.ChatTheme.LegacyDark ? 1 : 0
     property color borderColor: theme.buttonBorder
     property real fontPixelSize: theme.fontSizeLarge
     property bool fontPixelBold: false

File diff suppressed because it is too large.

View File

@@ -485,6 +485,24 @@
         <source>The size of text in the application.</source>
         <translation type="unfinished"></translation>
     </message>
+    <message>
+        <location filename="../qml/ApplicationSettings.qml" line="144"/>
+        <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="144"/>
+        <source>Small</source>
+        <translation type="unfinished"></translation>
+    </message>
+    <message>
+        <location filename="../qml/ApplicationSettings.qml" line="144"/>
+        <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="144"/>
+        <source>Medium</source>
+        <translation type="unfinished"></translation>
+    </message>
+    <message>
+        <location filename="../qml/ApplicationSettings.qml" line="144"/>
+        <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="144"/>
+        <source>Large</source>
+        <translation type="unfinished"></translation>
+    </message>
     <message>
         <location filename="../qml/ApplicationSettings.qml" line="166"/>
         <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="166"/>
@@ -1653,47 +1671,47 @@ model to get started</source>
 <context>
     <name>ModelList</name>
     <message>
-        <location filename="../modellist.cpp" line="1537"/>
+        <location filename="../modellist.cpp" line="1548"/>
         <source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1556"/>
+        <location filename="../modellist.cpp" line="1567"/>
         <source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1584"/>
+        <location filename="../modellist.cpp" line="1595"/>
         <source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1615"/>
+        <location filename="../modellist.cpp" line="1626"/>
         <source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1640"/>
+        <location filename="../modellist.cpp" line="1651"/>
         <source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1666"/>
+        <location filename="../modellist.cpp" line="1677"/>
         <source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1569"/>
+        <location filename="../modellist.cpp" line="1580"/>
         <source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1596"/>
+        <location filename="../modellist.cpp" line="1607"/>
         <source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="2081"/>
+        <location filename="../modellist.cpp" line="2092"/>
         <source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
         <translation type="unfinished"></translation>
     </message>
@@ -2345,6 +2363,14 @@ NOTE: By turning on this feature, you will be sending your data to the GPT4All O
         <translation type="unfinished"></translation>
     </message>
 </context>
+<context>
+    <name>QObject</name>
+    <message>
+        <location filename="../mysettings.cpp" line="119"/>
+        <source>Default</source>
+        <translation type="unfinished"></translation>
+    </message>
+</context>
 <context>
     <name>SettingsView</name>
     <message>

View File

@@ -538,6 +538,24 @@
 reinstall.</source>
         <translation type="unfinished"></translation>
     </message>
+    <message>
+        <location filename="../qml/ApplicationSettings.qml" line="144"/>
+        <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="144"/>
+        <source>Small</source>
+        <translation type="unfinished"></translation>
+    </message>
+    <message>
+        <location filename="../qml/ApplicationSettings.qml" line="144"/>
+        <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="144"/>
+        <source>Medium</source>
+        <translation type="unfinished"></translation>
+    </message>
+    <message>
+        <location filename="../qml/ApplicationSettings.qml" line="144"/>
+        <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="144"/>
+        <source>Large</source>
+        <translation type="unfinished"></translation>
+    </message>
     <message>
         <location filename="../qml/ApplicationSettings.qml" line="166"/>
         <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="166"/>
@@ -1830,47 +1848,47 @@ model to get started</source>
 OpenAI&lt;/strong&gt;&lt;br&gt; %1</translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1537"/>
+        <location filename="../modellist.cpp" line="1548"/>
         <source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1556"/>
+        <location filename="../modellist.cpp" line="1567"/>
         <source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1569"/>
+        <location filename="../modellist.cpp" line="1580"/>
         <source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1584"/>
+        <location filename="../modellist.cpp" line="1595"/>
         <source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
         <translation>&lt;strong&gt;Modelo ChatGPT GPT-4 de OpenAI&lt;/strong&gt;&lt;br&gt; %1 %2</translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1596"/>
+        <location filename="../modellist.cpp" line="1607"/>
         <source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1615"/>
+        <location filename="../modellist.cpp" line="1626"/>
         <source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation>&lt;strong&gt;Modelo Mistral Tiny&lt;/strong&gt;&lt;br&gt; %1</translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1640"/>
+        <location filename="../modellist.cpp" line="1651"/>
         <source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation>&lt;strong&gt;Modelo Mistral Small&lt;/strong&gt;&lt;br&gt; %1</translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1666"/>
+        <location filename="../modellist.cpp" line="1677"/>
         <source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation>&lt;strong&gt;Modelo Mistral Medium&lt;/strong&gt;&lt;br&gt; %1</translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="2081"/>
+        <location filename="../modellist.cpp" line="2092"/>
         <source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
         <translation type="unfinished"></translation>
     </message>
@@ -2657,6 +2675,14 @@ NOTE: By turning on this feature, you will be sending your data to the GPT4All O
         <translation>Se muestra cuando la ventana emergente está ocupada</translation>
     </message>
 </context>
+<context>
+    <name>QObject</name>
+    <message>
+        <location filename="../mysettings.cpp" line="119"/>
+        <source>Default</source>
+        <translation type="unfinished">Predeterminado</translation>
+    </message>
+</context>
 <context>
     <name>SettingsView</name>
     <message>

View File

@@ -528,6 +528,24 @@
         <source>The size of text in the application.</source>
         <translation></translation>
     </message>
+    <message>
+        <location filename="../qml/ApplicationSettings.qml" line="144"/>
+        <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="144"/>
+        <source>Small</source>
+        <translation type="unfinished"></translation>
+    </message>
+    <message>
+        <location filename="../qml/ApplicationSettings.qml" line="144"/>
+        <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="144"/>
+        <source>Medium</source>
+        <translation type="unfinished"></translation>
+    </message>
+    <message>
+        <location filename="../qml/ApplicationSettings.qml" line="144"/>
+        <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="144"/>
+        <source>Large</source>
+        <translation type="unfinished"></translation>
+    </message>
     <message>
         <location filename="../qml/ApplicationSettings.qml" line="166"/>
         <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="166"/>
@@ -1744,32 +1762,32 @@ model to get started</source>
 <context>
     <name>ModelList</name>
     <message>
-        <location filename="../modellist.cpp" line="1537"/>
+        <location filename="../modellist.cpp" line="1548"/>
         <source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
         <translation>&lt;ul&gt;&lt;li&gt; OpenAI API &lt;/li&gt;&lt;li&gt; OpenAI&lt;/li&gt;&lt;li&gt; API &lt;/li&gt;&lt;li&gt; OpenAI &lt;/li&gt;&lt;li&gt;&lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt; API &lt;/a&gt;&lt;/li&gt;</translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1556"/>
+        <location filename="../modellist.cpp" line="1567"/>
         <source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1584"/>
+        <location filename="../modellist.cpp" line="1595"/>
         <source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1615"/>
+        <location filename="../modellist.cpp" line="1626"/>
         <source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1640"/>
+        <location filename="../modellist.cpp" line="1651"/>
         <source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1666"/>
+        <location filename="../modellist.cpp" line="1677"/>
         <source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
         <translation type="unfinished"></translation>
     </message>
@@ -1778,7 +1796,7 @@ model to get started</source>
         <translation type="vanished">&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt;</translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1569"/>
+        <location filename="../modellist.cpp" line="1580"/>
         <source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
         <translation>&lt;br&gt;&lt;br&gt;&lt;i&gt;* 使ChatGPT-4OpenAI付款API密钥访问OpenAI获取更多信息</translation>
     </message>
@@ -1787,7 +1805,7 @@ model to get started</source>
         <translation type="vanished">&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt;</translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="1596"/>
+        <location filename="../modellist.cpp" line="1607"/>
         <source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
         <translation>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</translation>
     </message>
@@ -1804,7 +1822,7 @@ model to get started</source>
         <translation type="vanished">&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt;</translation>
     </message>
     <message>
-        <location filename="../modellist.cpp" line="2081"/>
+        <location filename="../modellist.cpp" line="2092"/>
         <source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
         <translation>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</translation>
     </message>
@@ -2492,6 +2510,14 @@ NOTE: By turning on this feature, you will be sending your data to the GPT4All O
         <translation></translation>
     </message>
 </context>
+<context>
+    <name>QObject</name>
+    <message>
+        <location filename="../mysettings.cpp" line="119"/>
+        <source>Default</source>
+        <translation type="unfinished"></translation>
+    </message>
+</context>
 <context>
     <name>SettingsView</name>
     <message>