diff --git a/CMakeLists.txt b/CMakeLists.txt
index 0580d534..f3d8958e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -55,6 +55,7 @@ add_subdirectory(llama.cpp)
qt_add_executable(chat
main.cpp
download.h download.cpp
+ network.h network.cpp
gptj.h gptj.cpp
llamamodel.h llamamodel.cpp
llama.cpp/examples/common.cpp
@@ -66,14 +67,22 @@ qt_add_executable(chat
qt_add_qml_module(chat
URI gpt4all-chat
VERSION 1.0
- QML_FILES main.qml qml/ModelDownloaderDialog.qml
+ QML_FILES
+ main.qml
+ qml/NetworkDialog.qml
+ qml/ModelDownloaderDialog.qml
+ qml/ThumbsDownDialog.qml
RESOURCES
icons/send_message.svg
icons/stop_generating.svg
icons/regenerate.svg
- icons/logo.svg
icons/copy.svg
icons/settings.svg
+ icons/edit.svg
+ icons/network.svg
+ icons/thumbs_up.svg
+ icons/thumbs_down.svg
+ icons/logo.svg
icons/logo-16.png
icons/logo-32.png
icons/logo-48.png
diff --git a/icons/edit.svg b/icons/edit.svg
new file mode 100644
index 00000000..9820173b
--- /dev/null
+++ b/icons/edit.svg
@@ -0,0 +1,5 @@
+
+
diff --git a/icons/network.svg b/icons/network.svg
new file mode 100644
index 00000000..266f13d6
--- /dev/null
+++ b/icons/network.svg
@@ -0,0 +1 @@
+
diff --git a/icons/thumbs_down.svg b/icons/thumbs_down.svg
new file mode 100644
index 00000000..b01a82d3
--- /dev/null
+++ b/icons/thumbs_down.svg
@@ -0,0 +1,5 @@
+
+
diff --git a/icons/thumbs_up.svg b/icons/thumbs_up.svg
new file mode 100644
index 00000000..cd5efcd2
--- /dev/null
+++ b/icons/thumbs_up.svg
@@ -0,0 +1,5 @@
+
+
diff --git a/main.cpp b/main.cpp
index 098f67ca..0f5d4997 100644
--- a/main.cpp
+++ b/main.cpp
@@ -6,6 +6,7 @@
#include "llm.h"
#include "download.h"
+#include "network.h"
#include "config.h"
int main(int argc, char *argv[])
@@ -19,7 +20,7 @@ int main(int argc, char *argv[])
QQmlApplicationEngine engine;
qmlRegisterSingletonInstance("llm", 1, 0, "LLM", LLM::globalInstance());
qmlRegisterSingletonInstance("download", 1, 0, "Download", Download::globalInstance());
-
+ qmlRegisterSingletonInstance("network", 1, 0, "Network", Network::globalInstance());
const QUrl url(u"qrc:/gpt4all-chat/main.qml"_qs);
QObject::connect(&engine, &QQmlApplicationEngine::objectCreated,
diff --git a/main.qml b/main.qml
index 51f8ebdb..1cbc60e8 100644
--- a/main.qml
+++ b/main.qml
@@ -4,6 +4,7 @@ import QtQuick.Controls
import QtQuick.Controls.Basic
import QtQuick.Layouts
import llm
+import network
Window {
id: window
@@ -438,9 +439,60 @@ Window {
}
}
+ NetworkDialog {
+ id: networkDialog
+ anchors.centerIn: parent
+ Item {
+ Accessible.role: Accessible.Dialog
+ Accessible.name: qsTr("Network dialog")
+ Accessible.description: qsTr("Dialog for opt-in to sharing feedback/conversations")
+ }
+ }
+
+ Button {
+ id: networkButton
+ anchors.right: parent.right
+ anchors.top: parent.top
+ anchors.topMargin: 20
+ anchors.rightMargin: 30
+ width: 60
+ height: 60
+ z: 200
+ padding: 15
+
+ Accessible.role: Accessible.Button
+ Accessible.name: qsTr("Network button")
+ Accessible.description: qsTr("Reveals a dialogue where you can opt-in for sharing data over network")
+
+ background: Item {
+ anchors.fill: parent
+ Rectangle {
+ anchors.fill: parent
+ color: "transparent"
+ visible: Network.isActive
+ border.color: "#7d7d8e"
+ border.width: 1
+ radius: 10
+ }
+ Image {
+ anchors.centerIn: parent
+ width: 50
+ height: 50
+ source: "qrc:/gpt4all-chat/icons/network.svg"
+ }
+ }
+
+ onClicked: {
+ if (Network.isActive)
+ Network.isActive = false
+ else
+ networkDialog.open();
+ }
+ }
+
Button {
id: settingsButton
- anchors.right: parent.right
+ anchors.right: networkButton.left
anchors.top: parent.top
anchors.topMargin: 30
anchors.rightMargin: 30
@@ -525,17 +577,7 @@ Window {
}
onClicked: {
- var conversation = "";
- for (var i = 0; i < chatModel.count; i++) {
- var item = chatModel.get(i)
- var string = item.name;
- if (item.currentResponse)
- string += LLM.response
- else
- string += chatModel.get(i).value
- string += "\n"
- conversation += string
- }
+ var conversation = getConversation()
copyEdit.text = conversation
copyEdit.selectAll()
copyEdit.copy()
@@ -549,6 +591,49 @@ Window {
}
}
+ function getConversation() {
+ var conversation = "";
+ for (var i = 0; i < chatModel.count; i++) {
+ var item = chatModel.get(i)
+ var string = item.name;
+ var isResponse = item.name === qsTr("Response: ")
+ if (item.currentResponse)
+ string += LLM.response
+ else
+ string += chatModel.get(i).value
+ if (isResponse && item.stopped)
+ string += " "
+ string += "\n"
+ conversation += string
+ }
+ return conversation
+ }
+
+ function getConversationJson() {
+ var str = "{\"conversation\": [";
+ for (var i = 0; i < chatModel.count; i++) {
+ var item = chatModel.get(i)
+ var isResponse = item.name === qsTr("Response: ")
+ str += "{\"content\": \"";
+ if (item.currentResponse)
+ str += LLM.response + "\""
+ else
+ str += item.value + "\""
+ str += ", \"role\": \"" + (isResponse ? "assistant" : "user") + "\"";
+ if (isResponse && item.thumbsUpState !== item.thumbsDownState)
+ str += ", \"rating\": \"" + (item.thumbsUpState ? "positive" : "negative") + "\"";
+ if (isResponse && item.newResponse !== "")
+ str += ", \"edited_content\": \"" + item.newResponse + "\"";
+ if (isResponse && item.stopped)
+ str += ", \"stopped\": \"true\""
+ if (!isResponse)
+ str += "},"
+ else
+ str += ((i < chatModel.count - 1) ? "}," : "}")
+ }
+ return str + "]}"
+ }
+
Button {
id: resetContextButton
anchors.right: copyButton.left
@@ -586,6 +671,7 @@ Window {
anchors.centerIn: parent
modal: false
opacity: 0.9
+ padding: 20
Text {
horizontalAlignment: Text.AlignJustify
text: qsTr("ERROR: Update system could not find the MaintenanceTool used
@@ -602,7 +688,6 @@ Window {
}
background: Rectangle {
anchors.fill: parent
- anchors.margins: -20
color: "#202123"
border.width: 1
border.color: "white"
@@ -793,7 +878,6 @@ Window {
wrapMode: Text.WordWrap
focus: false
readOnly: true
- padding: 20
font.pixelSize: 24
cursorVisible: currentResponse ? (LLM.response !== "" ? LLM.responseInProgress : false) : false
cursorPosition: text.length
@@ -805,7 +889,10 @@ Window {
Accessible.name: name
Accessible.description: name === qsTr("Response: ") ? "The response by the model" : "The prompt by the user"
+ topPadding: 20
+ bottomPadding: 20
leftPadding: 100
+ rightPadding: 100
BusyIndicator {
anchors.left: parent.left
@@ -836,6 +923,88 @@ Window {
color: "white"
}
}
+
+ ThumbsDownDialog {
+ id: thumbsDownDialog
+ property point globalPoint: mapFromItem(window,
+ window.width / 2 - width / 2,
+ window.height / 2 - height / 2)
+ x: globalPoint.x
+ y: globalPoint.y
+ property string text: currentResponse ? LLM.response : (value ? value : "")
+ response: newResponse === "" ? text : newResponse
+ onAccepted: {
+ var responseHasChanged = response !== text && response !== newResponse
+ if (thumbsDownState && !thumbsUpState && !responseHasChanged)
+ return
+
+ newResponse = response
+ thumbsDownState = true
+ thumbsUpState = false
+ Network.sendConversation(getConversationJson());
+ }
+ }
+
+ Column {
+ visible: name === qsTr("Response: ") &&
+ (!currentResponse || !LLM.responseInProgress) && Network.isActive
+ anchors.right: parent.right
+ anchors.rightMargin: 20
+ anchors.top: parent.top
+ anchors.topMargin: 20
+ spacing: 10
+
+ Item {
+ width: childrenRect.width
+ height: childrenRect.height
+ Button {
+ id: thumbsUp
+ width: 30
+ height: 30
+ opacity: thumbsUpState || thumbsUpState == thumbsDownState ? 1.0 : 0.2
+ background: Image {
+ anchors.fill: parent
+ source: "qrc:/gpt4all-chat/icons/thumbs_up.svg"
+ }
+ onClicked: {
+ if (thumbsUpState && !thumbsDownState)
+ return
+
+ newResponse = ""
+ thumbsUpState = true
+ thumbsDownState = false
+ Network.sendConversation(getConversationJson());
+ }
+ }
+
+ Button {
+ id: thumbsDown
+ anchors.top: thumbsUp.top
+ anchors.topMargin: 10
+ anchors.left: thumbsUp.right
+ anchors.leftMargin: 2
+ width: 30
+ height: 30
+ checked: thumbsDownState
+ opacity: thumbsDownState || thumbsUpState == thumbsDownState ? 1.0 : 0.2
+ transform: [
+ Matrix4x4 {
+ matrix: Qt.matrix4x4(-1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1)
+ },
+ Translate {
+ x: thumbsDown.width
+ }
+ ]
+ background: Image {
+ anchors.fill: parent
+ source: "qrc:/gpt4all-chat/icons/thumbs_down.svg"
+ }
+ onClicked: {
+ thumbsDownDialog.open()
+ }
+ }
+ }
+ }
}
property bool shouldAutoScroll: true
@@ -880,15 +1049,22 @@ Window {
}
leftPadding: 50
onClicked: {
- if (LLM.responseInProgress)
+ if (chatModel.count)
+ var listElement = chatModel.get(chatModel.count - 1)
+
+ if (LLM.responseInProgress) {
+ listElement.stopped = true
LLM.stopGenerating()
- else {
+ } else {
LLM.regenerateResponse()
if (chatModel.count) {
- var listElement = chatModel.get(chatModel.count - 1)
if (listElement.name === qsTr("Response: ")) {
listElement.currentResponse = true
+ listElement.stopped = false
listElement.value = LLM.response
+ listElement.thumbsUpState = false
+ listElement.thumbsDownState = false
+ listElement.newResponse = ""
LLM.prompt(listElement.prompt, settings.promptTemplate, settings.maxLength,
settings.topK, settings.topP, settings.temperature,
settings.promptBatchSize)
@@ -956,10 +1132,14 @@ Window {
listElement.currentResponse = false
listElement.value = LLM.response
}
-
var prompt = textInput.text + "\n"
- chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text})
- chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt})
+ chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false,
+ "value": textInput.text})
+ chatModel.append({"id": chatModel.count, "name": qsTr("Response: "),
+ "currentResponse": true, "value": "", "stopped": false,
+ "thumbsUpState": false, "thumbsDownState": false,
+ "newResponse": "",
+ "prompt": prompt})
LLM.resetResponse()
LLM.prompt(prompt, settings.promptTemplate, settings.maxLength, settings.topK,
settings.topP, settings.temperature, settings.promptBatchSize)
diff --git a/network.cpp b/network.cpp
new file mode 100644
index 00000000..f5bc331d
--- /dev/null
+++ b/network.cpp
@@ -0,0 +1,126 @@
+#include "network.h"
+#include "llm.h"
+
+#include <QJsonDocument>
+#include <QJsonObject>
+#include <QNetworkReply>
+#include <QNetworkRequest>
+#include <QSettings>
+#include <QUrl>
+#include <QUuid>
+
+//#define DEBUG
+
+class MyNetwork: public Network { };
+Q_GLOBAL_STATIC(MyNetwork, networkInstance)
+Network *Network::globalInstance()
+{
+ return networkInstance();
+}
+
+Network::Network()
+ : QObject{nullptr}
+ , m_isActive(false)
+{
+ QSettings settings;
+ settings.sync();
+ m_isActive = settings.value("network/isActive", false).toBool();
+ m_uniqueId = settings.value("uniqueId", generateUniqueId()).toString();
+ settings.setValue("uniqueId", m_uniqueId);
+ settings.sync();
+ emit activeChanged();
+}
+
+void Network::setActive(bool b)
+{
+ QSettings settings;
+ settings.setValue("network/isActive", b);
+ settings.sync();
+ m_isActive = b;
+ emit activeChanged();
+}
+
+QString Network::generateUniqueId() const
+{
+ return QUuid::createUuid().toString(QUuid::WithoutBraces);
+}
+
+bool Network::packageAndSendJson(const QString &json)
+{
+ if (!m_isActive)
+ return false;
+
+ QJsonParseError err;
+ QJsonDocument doc = QJsonDocument::fromJson(json.toUtf8(), &err);
+ if (err.error != QJsonParseError::NoError) {
+ qDebug() << "Couldn't parse: " << json << err.errorString();
+ return false;
+ }
+
+ Q_ASSERT(doc.isObject());
+ QJsonObject object = doc.object();
+ object.insert("source", "gpt4all-chat");
+ object.insert("agent_id", LLM::globalInstance()->modelName());
+ object.insert("submitter_id", m_uniqueId);
+
+ QSettings settings;
+ settings.sync();
+ QString attribution = settings.value("attribution", QString()).toString();
+ if (!attribution.isEmpty())
+ object.insert("attribution", attribution);
+
+ QJsonDocument newDoc;
+ newDoc.setObject(object);
+
+#if defined(DEBUG)
+ printf("%s", qPrintable(newDoc.toJson(QJsonDocument::Indented)));
+ fflush(stdout);
+#endif
+
+ QUrl jsonUrl("http://localhost/v1/ingest/chat");
+ QNetworkRequest request(jsonUrl);
+ QByteArray body(newDoc.toJson());
+ request.setHeader(QNetworkRequest::ContentTypeHeader, "application/json");
+ QNetworkReply *jsonReply = m_networkManager.post(request, body);
+ connect(jsonReply, &QNetworkReply::finished, this, &Network::handleJsonUploadFinished);
+ m_activeUploads.append(jsonReply);
+ return true;
+}
+
+void Network::handleJsonUploadFinished()
+{
+ QNetworkReply *jsonReply = qobject_cast<QNetworkReply *>(sender());
+ if (!jsonReply)
+ return;
+
+ m_activeUploads.removeAll(jsonReply);
+
+ QVariant response = jsonReply->attribute(QNetworkRequest::HttpStatusCodeAttribute);
+ Q_ASSERT(response.isValid());
+ bool ok;
+ int code = response.toInt(&ok);
+ if (!ok)
+ qWarning() << "ERROR: Invalid response.";
+ if (code != 200)
+ qWarning() << "ERROR: response != 200 code:" << code;
+
+ QByteArray jsonData = jsonReply->readAll();
+ QJsonParseError err;
+ QJsonDocument document = QJsonDocument::fromJson(jsonData, &err);
+ if (err.error != QJsonParseError::NoError) {
+ qDebug() << "ERROR: Couldn't parse: " << jsonData << err.errorString();
+ return;
+ }
+
+#if defined(DEBUG)
+ printf("%s", qPrintable(document.toJson(QJsonDocument::Indented)));
+ fflush(stdout);
+#endif
+
+ jsonReply->deleteLater();
+}
+
+bool Network::sendConversation(const QString &conversation)
+{
+ return packageAndSendJson(conversation);
+}
diff --git a/network.h b/network.h
new file mode 100644
index 00000000..bc5bd233
--- /dev/null
+++ b/network.h
@@ -0,0 +1,43 @@
+#ifndef NETWORK_H
+#define NETWORK_H
+
+#include <QObject>
+#include <QNetworkAccessManager>
+#include <QVector>
+
+class Network : public QObject
+{
+ Q_OBJECT
+ Q_PROPERTY(bool isActive READ isActive WRITE setActive NOTIFY activeChanged)
+public:
+
+ static Network *globalInstance();
+
+ bool isActive() const { return m_isActive; }
+ void setActive(bool b);
+
+ Q_INVOKABLE QString generateUniqueId() const;
+ Q_INVOKABLE bool sendConversation(const QString &conversation);
+
+Q_SIGNALS:
+ void activeChanged();
+
+private Q_SLOTS:
+ void handleJsonUploadFinished();
+
+private:
+ bool packageAndSendJson(const QString &json);
+
+private:
+ bool m_isActive;
+ QString m_uniqueId;
+ QNetworkAccessManager m_networkManager;
+ QVector<QNetworkReply*> m_activeUploads;
+
+private:
+ explicit Network();
+ ~Network() {}
+ friend class MyNetwork;
+};
+
+#endif // NETWORK_H
diff --git a/qml/NetworkDialog.qml b/qml/NetworkDialog.qml
new file mode 100644
index 00000000..0a589e65
--- /dev/null
+++ b/qml/NetworkDialog.qml
@@ -0,0 +1,158 @@
+import QtCore
+import QtQuick
+import QtQuick.Controls
+import QtQuick.Layouts
+import download
+import network
+import llm
+
+Dialog {
+ id: networkDialog
+ anchors.centerIn: parent
+ modal: true
+ opacity: 0.9
+ padding: 20
+ width: 1024
+ height: column.height + dialogBox.height + 20
+
+ Settings {
+ id: settings
+ property string attribution: ""
+ }
+
+ Component.onDestruction: {
+ settings.sync()
+ }
+
+ Column {
+ id: column
+ spacing: 20
+ Item {
+ width: childrenRect.width
+ height: childrenRect.height
+ Image {
+ id: img
+ anchors.top: parent.top
+ anchors.left: parent.left
+ width: 60
+ height: 60
+ source: "qrc:/gpt4all-chat/icons/logo.svg"
+ }
+ Text {
+ anchors.left: img.right
+ anchors.leftMargin: 30
+ anchors.verticalCenter: img.verticalCenter
+ text: qsTr("Contribute data to the GPT4All Opensource Datalake.")
+ color: "#d1d5db"
+ }
+ }
+
+ ScrollView {
+ clip: true
+ height: 300
+ width: 1024 - 40
+ ScrollBar.vertical.policy: ScrollBar.AlwaysOn
+ ScrollBar.horizontal.policy: ScrollBar.AlwaysOff
+
+ TextArea {
+ id: textOptIn
+ wrapMode: Text.Wrap
+ width: 1024 - 40
+ padding: 20
+ text: qsTr("By enabling this feature, you will be able to participate in the democratic process of training a large language model by contributing data for future model improvements.
+
+When a GPT4All model responds to you and you have opted-in, you can like/dislike its response. If you dislike a response, you can suggest an alternative response. This data will be collected and aggregated in the GPT4All Datalake.
+
+NOTE: By turning on this feature, you will be sending your data to the GPT4All Open Source Datalake. You should have no expectation of chat privacy when this feature is enabled. You should; however, have an expectation of an optional attribution if you wish. Your chat data will be openly available for anyone to download and will be used by Nomic AI to improve future GPT4All models. Nomic AI will retain all attribution information attached to your data and you will be credited as a contributor to any GPT4All model release that uses your data!")
+ color: "#d1d5db"
+ focus: false
+ readOnly: true
+ Accessible.role: Accessible.Paragraph
+ Accessible.name: qsTr("Terms for opt-in")
+ Accessible.description: qsTr("Describes what will happen when you opt-in")
+ background: Rectangle {
+ color: "#343541"
+ radius: 10
+ }
+ }
+ }
+
+ TextField {
+ id: attribution
+ color: "#dadadc"
+ padding: 20
+ width: parent.width
+ text: settings.attribution
+ font.pixelSize: 24
+ placeholderText: qsTr("Please provide a name for attribution (optional)")
+ placeholderTextColor: "#7d7d8e"
+ background: Rectangle {
+ color: "#40414f"
+ radius: 10
+ }
+ Accessible.role: Accessible.EditableText
+ Accessible.name: qsTr("Attribution (optional)")
+ Accessible.description: qsTr("Textfield for providing attribution")
+ onEditingFinished: {
+ settings.attribution = attribution.text;
+ settings.sync();
+ }
+ }
+ }
+
+ background: Rectangle {
+ anchors.fill: parent
+ color: "#202123"
+ border.width: 1
+ border.color: "white"
+ radius: 10
+ }
+
+ footer: DialogButtonBox {
+ id: dialogBox
+ padding: 20
+ alignment: Qt.AlignRight
+ spacing: 10
+ Button {
+ text: qsTr("Enable")
+ background: Rectangle {
+ border.color: "#7d7d8e"
+ border.width: 1
+ radius: 10
+ color: "#343541"
+ }
+ Accessible.role: Accessible.Button
+ Accessible.name: text
+ Accessible.description: qsTr("Enable opt-in button")
+
+ padding: 15
+ DialogButtonBox.buttonRole: DialogButtonBox.AcceptRole
+ }
+ Button {
+ text: qsTr("Cancel")
+ background: Rectangle {
+ border.color: "#7d7d8e"
+ border.width: 1
+ radius: 10
+ color: "#343541"
+ }
+ Accessible.role: Accessible.Button
+ Accessible.name: text
+ Accessible.description: qsTr("Cancel opt-in button")
+
+ padding: 15
+ DialogButtonBox.buttonRole: DialogButtonBox.RejectRole
+ }
+ background: Rectangle {
+ color: "transparent"
+ }
+ }
+
+ onAccepted: {
+ Network.isActive = true;
+ }
+
+ onRejected: {
+ Network.isActive = false;
+ }
+}
diff --git a/qml/ThumbsDownDialog.qml b/qml/ThumbsDownDialog.qml
new file mode 100644
index 00000000..7a381332
--- /dev/null
+++ b/qml/ThumbsDownDialog.qml
@@ -0,0 +1,102 @@
+import QtCore
+import QtQuick
+import QtQuick.Controls
+import QtQuick.Layouts
+import download
+import network
+import llm
+
+Dialog {
+ id: thumbsDownDialog
+ modal: true
+ opacity: 0.9
+ padding: 20
+ width: 900
+ property alias response: thumbsDownNewResponse.text
+ Column {
+ anchors.fill: parent
+ spacing: 20
+ Item {
+ width: childrenRect.width
+ height: childrenRect.height
+ Image {
+ id: img
+ anchors.top: parent.top
+ anchors.left: parent.left
+ width: 60
+ height: 60
+ source: "qrc:/gpt4all-chat/icons/thumbs_down.svg"
+ }
+ Text {
+ anchors.left: img.right
+ anchors.leftMargin: 30
+ anchors.verticalCenter: img.verticalCenter
+ text: qsTr("Provide feedback for negative rating")
+ color: "#d1d5db"
+ }
+ }
+
+ ScrollView {
+ clip: true
+ height: 300
+ width: parent.width
+ ScrollBar.vertical.policy: ScrollBar.AlwaysOn
+ ScrollBar.horizontal.policy: ScrollBar.AlwaysOff
+
+ TextArea {
+ id: thumbsDownNewResponse
+ color: "#dadadc"
+ padding: 20
+ width: parent.width
+ height: 300
+ wrapMode: Text.Wrap
+ font.pixelSize: 24
+ placeholderText: qsTr("Please provide a better response...")
+ placeholderTextColor: "#7d7d8e"
+ background: Rectangle {
+ color: "#40414f"
+ radius: 10
+ }
+ }
+ }
+ }
+
+ background: Rectangle {
+ anchors.fill: parent
+ color: "#202123"
+ border.width: 1
+ border.color: "white"
+ radius: 10
+ }
+
+ footer: DialogButtonBox {
+ padding: 20
+ alignment: Qt.AlignRight
+ spacing: 10
+ Button {
+ text: qsTr("Submit")
+ background: Rectangle {
+ border.color: "#7d7d8e"
+ border.width: 1
+ radius: 10
+ color: "#343541"
+ }
+ padding: 15
+ DialogButtonBox.buttonRole: DialogButtonBox.AcceptRole
+ }
+ Button {
+ text: qsTr("Cancel")
+ background: Rectangle {
+ border.color: "#7d7d8e"
+ border.width: 1
+ radius: 10
+ color: "#343541"
+ }
+ padding: 15
+ DialogButtonBox.buttonRole: DialogButtonBox.RejectRole
+ }
+ background: Rectangle {
+ color: "transparent"
+ }
+ }
+}
\ No newline at end of file