New thumbs up/down support for gpt4all-datalake.

Branch: pull/520/head
Author: Adam Treat
Parent: 993a43d33a
Commit: 1f65e381ee

@@ -55,6 +55,7 @@ add_subdirectory(llama.cpp)
qt_add_executable(chat
main.cpp
download.h download.cpp
network.h network.cpp
gptj.h gptj.cpp
llamamodel.h llamamodel.cpp
llama.cpp/examples/common.cpp
@@ -66,14 +67,22 @@ qt_add_executable(chat
qt_add_qml_module(chat
URI gpt4all-chat
VERSION 1.0
- QML_FILES main.qml qml/ModelDownloaderDialog.qml
+ QML_FILES
+ main.qml
+ qml/NetworkDialog.qml
+ qml/ModelDownloaderDialog.qml
+ qml/ThumbsDownDialog.qml
RESOURCES
icons/send_message.svg
icons/stop_generating.svg
icons/regenerate.svg
- icons/logo.svg
icons/copy.svg
icons/settings.svg
+ icons/edit.svg
+ icons/network.svg
+ icons/thumbs_up.svg
+ icons/thumbs_down.svg
+ icons/logo.svg
icons/logo-16.png
icons/logo-32.png
icons/logo-48.png

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="#7d7d8e" viewBox="0 0 576 512"><path d="M402.6 83.2l90.2 90.2c3.8 3.8 3.8 10 0 13.8L274.4 405.6l-92.8 10.3c-12.4 1.4-22.9-9.1-21.5-21.5l10.3-92.8L388.8 83.2c3.8-3.8 10-3.8 13.8 0zm162-22.9l-48.8-48.8c-15.2-15.2-39.9-15.2-55.2 0l-35.4 35.4c-3.8 3.8-3.8 10 0 13.8l90.2 90.2c3.8 3.8 10 3.8 13.8 0l35.4-35.4c15.2-15.3 15.2-40 0-55.2zM384 346.2V448H64V128h229.8c3.2 0 6.2-1.3 8.5-3.5l40-40c7.6-7.6 2.2-20.5-8.5-20.5H48C21.5 64 0 85.5 0 112v352c0 26.5 21.5 48 48 48h352c26.5 0 48-21.5 48-48V306.2c0-10.7-12.9-16-20.5-8.5l-40 40c-2.2 2.3-3.5 5.3-3.5 8.5z"/></svg>
<!--
Font Awesome Free 5.2.0 by @fontawesome - https://fontawesome.com
License - https://fontawesome.com/license (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
-->


@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="#7d7d8e" viewBox="0 0 22 22"><g transform="matrix(1.37217 0 0 1.37217-27.479-15.472)"><path d="m28.04 13.847c-2.891 0-5.608 1.126-7.652 3.17l1.167 1.167c1.732-1.732 4.04-2.686 6.485-2.686 2.45 0 4.753.954 6.485 2.686l1.167-1.167c-2.044-2.044-4.761-3.17-7.652-3.17"/><path d="m22.466 19.09l1.167 1.167c1.178-1.178 2.745-1.828 4.412-1.828 1.667 0 3.233.649 4.412 1.828l1.167-1.167c-1.49-1.49-3.471-2.311-5.579-2.311-2.108 0-4.089.821-5.579 2.311"/><path d="m24.541 21.17l1.167 1.167c.624-.624 1.454-.968 2.337-.968.883 0 1.712.344 2.337.968l1.167-1.167c-.936-.936-2.18-1.451-3.504-1.451-1.324 0-2.568.515-3.504 1.451"/><path d="m28.04 22.994c-.429 0-.858.164-1.185.491-.011.011-.022.023-.033.035l1.218 1.222 1.221-1.218c-.012-.013-.024-.026-.036-.038-.327-.327-.756-.491-1.185-.491"/></g></svg>


@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="#7d7d8e" viewBox="0 0 512 512"><path d="M0 56v240c0 13.255 10.745 24 24 24h80c13.255 0 24-10.745 24-24V56c0-13.255-10.745-24-24-24H24C10.745 32 0 42.745 0 56zm40 200c0-13.255 10.745-24 24-24s24 10.745 24 24-10.745 24-24 24-24-10.745-24-24zm272 256c-20.183 0-29.485-39.293-33.931-57.795-5.206-21.666-10.589-44.07-25.393-58.902-32.469-32.524-49.503-73.967-89.117-113.111a11.98 11.98 0 0 1-3.558-8.521V59.901c0-6.541 5.243-11.878 11.783-11.998 15.831-.29 36.694-9.079 52.651-16.178C256.189 17.598 295.709.017 343.995 0h2.844c42.777 0 93.363.413 113.774 29.737 8.392 12.057 10.446 27.034 6.148 44.632 16.312 17.053 25.063 48.863 16.382 74.757 17.544 23.432 19.143 56.132 9.308 79.469l.11.11c11.893 11.949 19.523 31.259 19.439 49.197-.156 30.352-26.157 58.098-59.553 58.098H350.723C358.03 364.34 384 388.132 384 430.548 384 504 336 512 312 512z"/></svg>
<!--
Font Awesome Free 5.2.0 by @fontawesome - https://fontawesome.com
License - https://fontawesome.com/license (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
-->


@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="#7d7d8e" viewBox="0 0 512 512"><path d="M104 224H24c-13.255 0-24 10.745-24 24v240c0 13.255 10.745 24 24 24h80c13.255 0 24-10.745 24-24V248c0-13.255-10.745-24-24-24zM64 472c-13.255 0-24-10.745-24-24s10.745-24 24-24 24 10.745 24 24-10.745 24-24 24zM384 81.452c0 42.416-25.97 66.208-33.277 94.548h101.723c33.397 0 59.397 27.746 59.553 58.098.084 17.938-7.546 37.249-19.439 49.197l-.11.11c9.836 23.337 8.237 56.037-9.308 79.469 8.681 25.895-.069 57.704-16.382 74.757 4.298 17.598 2.244 32.575-6.148 44.632C440.202 511.587 389.616 512 346.839 512l-2.845-.001c-48.287-.017-87.806-17.598-119.56-31.725-15.957-7.099-36.821-15.887-52.651-16.178-6.54-.12-11.783-5.457-11.783-11.998v-213.77c0-3.2 1.282-6.271 3.558-8.521 39.614-39.144 56.648-80.587 89.117-113.111 14.804-14.832 20.188-37.236 25.393-58.902C282.515 39.293 291.817 0 312 0c24 0 72 8 72 81.452z"/></svg>
<!--
Font Awesome Free 5.2.0 by @fontawesome - https://fontawesome.com
License - https://fontawesome.com/license (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
-->


@@ -6,6 +6,7 @@
#include "llm.h"
#include "download.h"
#include "network.h"
#include "config.h"
int main(int argc, char *argv[])
@@ -19,7 +20,7 @@ int main(int argc, char *argv[])
QQmlApplicationEngine engine;
qmlRegisterSingletonInstance("llm", 1, 0, "LLM", LLM::globalInstance());
qmlRegisterSingletonInstance("download", 1, 0, "Download", Download::globalInstance());
qmlRegisterSingletonInstance("network", 1, 0, "Network", Network::globalInstance());
const QUrl url(u"qrc:/gpt4all-chat/main.qml"_qs);
QObject::connect(&engine, &QQmlApplicationEngine::objectCreated,

@ -4,6 +4,7 @@ import QtQuick.Controls
import QtQuick.Controls.Basic
import QtQuick.Layouts
import llm
import network
Window {
id: window
@@ -438,10 +439,61 @@ Window {
}
}
NetworkDialog {
id: networkDialog
anchors.centerIn: parent
Item {
Accessible.role: Accessible.Dialog
Accessible.name: qsTr("Network dialog")
Accessible.description: qsTr("Dialog for opt-in to sharing feedback/conversations")
}
}
Button {
- id: settingsButton
+ id: networkButton
anchors.right: parent.right
anchors.top: parent.top
anchors.topMargin: 20
anchors.rightMargin: 30
width: 60
height: 60
z: 200
padding: 15
Accessible.role: Accessible.Button
Accessible.name: qsTr("Network button")
Accessible.description: qsTr("Reveals a dialogue where you can opt-in for sharing data over network")
background: Item {
anchors.fill: parent
Rectangle {
anchors.fill: parent
color: "transparent"
visible: Network.isActive
border.color: "#7d7d8e"
border.width: 1
radius: 10
}
Image {
anchors.centerIn: parent
width: 50
height: 50
source: "qrc:/gpt4all-chat/icons/network.svg"
}
}
onClicked: {
if (Network.isActive)
Network.isActive = false
else
networkDialog.open();
}
}
Button {
id: settingsButton
anchors.right: networkButton.left
anchors.top: parent.top
anchors.topMargin: 30
anchors.rightMargin: 30
width: 60
@@ -525,17 +577,7 @@ Window {
}
onClicked: {
- var conversation = "";
- for (var i = 0; i < chatModel.count; i++) {
- var item = chatModel.get(i)
- var string = item.name;
- if (item.currentResponse)
- string += LLM.response
- else
- string += chatModel.get(i).value
- string += "\n"
- conversation += string
- }
+ var conversation = getConversation()
copyEdit.text = conversation
copyEdit.selectAll()
copyEdit.copy()
@@ -549,6 +591,49 @@
}
}
function getConversation() {
var conversation = "";
for (var i = 0; i < chatModel.count; i++) {
var item = chatModel.get(i)
var string = item.name;
var isResponse = item.name === qsTr("Response: ")
if (item.currentResponse)
string += LLM.response
else
string += chatModel.get(i).value
if (isResponse && item.stopped)
string += " <stopped>"
string += "\n"
conversation += string
}
return conversation
}
function getConversationJson() {
var str = "{\"conversation\": [";
for (var i = 0; i < chatModel.count; i++) {
var item = chatModel.get(i)
var isResponse = item.name === qsTr("Response: ")
str += "{\"content\": \"";
if (item.currentResponse)
str += LLM.response + "\""
else
str += item.value + "\""
str += ", \"role\": \"" + (isResponse ? "assistant" : "user") + "\"";
if (isResponse && item.thumbsUpState !== item.thumbsDownState)
str += ", \"rating\": \"" + (item.thumbsUpState ? "positive" : "negative") + "\"";
if (isResponse && item.newResponse !== "")
str += ", \"edited_content\": \"" + item.newResponse + "\"";
if (isResponse && item.stopped)
str += ", \"stopped\": \"true\""
if (!isResponse)
str += "},"
else
str += ((i < chatModel.count - 1) ? "}," : "}")
}
return str + "]}"
}
Button {
id: resetContextButton
anchors.right: copyButton.left
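A note on getConversationJson() above: it assembles the payload by plain string concatenation, so quotation marks, backslashes, or newlines inside a prompt or response are not escaped and can produce invalid JSON. Below is a minimal sketch of an escaping-safe equivalent, written as a hypothetical C++ helper; the ChatItem struct and the function name are illustrative and not part of this commit.

// Hypothetical sketch, not part of this commit: builds the same
// {"conversation": [...]} payload with Qt's JSON classes so that special
// characters in the chat text are escaped automatically.
#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <QList>
#include <QString>

struct ChatItem {                  // assumed mirror of the chatModel roles used above
    QString content;
    bool isResponse = false;
    bool thumbsUp = false;
    bool thumbsDown = false;
    bool stopped = false;
    QString newResponse;
};

QString conversationToJson(const QList<ChatItem> &items)
{
    QJsonArray conversation;
    for (const ChatItem &item : items) {
        QJsonObject turn;
        turn.insert("content", item.content);
        turn.insert("role", item.isResponse ? "assistant" : "user");
        if (item.isResponse && item.thumbsUp != item.thumbsDown)
            turn.insert("rating", item.thumbsUp ? "positive" : "negative");
        if (item.isResponse && !item.newResponse.isEmpty())
            turn.insert("edited_content", item.newResponse);
        if (item.isResponse && item.stopped)
            turn.insert("stopped", "true");
        conversation.append(turn);
    }
    QJsonObject root;
    root.insert("conversation", conversation);
    return QString::fromUtf8(QJsonDocument(root).toJson(QJsonDocument::Compact));
}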
@@ -586,6 +671,7 @@ Window {
anchors.centerIn: parent
modal: false
opacity: 0.9
padding: 20
Text {
horizontalAlignment: Text.AlignJustify
text: qsTr("ERROR: Update system could not find the MaintenanceTool used<br>
@@ -602,7 +688,6 @@ Window {
}
background: Rectangle {
anchors.fill: parent
- anchors.margins: -20
color: "#202123"
border.width: 1
border.color: "white"
@@ -793,7 +878,6 @@ Window {
wrapMode: Text.WordWrap
focus: false
readOnly: true
- padding: 20
font.pixelSize: 24
cursorVisible: currentResponse ? (LLM.response !== "" ? LLM.responseInProgress : false) : false
cursorPosition: text.length
@@ -805,7 +889,10 @@
Accessible.name: name
Accessible.description: name === qsTr("Response: ") ? "The response by the model" : "The prompt by the user"
topPadding: 20
bottomPadding: 20
leftPadding: 100
rightPadding: 100
BusyIndicator {
anchors.left: parent.left
@@ -836,6 +923,88 @@ Window {
color: "white"
}
}
ThumbsDownDialog {
id: thumbsDownDialog
property point globalPoint: mapFromItem(window,
window.width / 2 - width / 2,
window.height / 2 - height / 2)
x: globalPoint.x
y: globalPoint.y
property string text: currentResponse ? LLM.response : (value ? value : "")
response: newResponse === "" ? text : newResponse
onAccepted: {
var responseHasChanged = response !== text && response !== newResponse
if (thumbsDownState && !thumbsUpState && !responseHasChanged)
return
newResponse = response
thumbsDownState = true
thumbsUpState = false
Network.sendConversation(getConversationJson());
}
}
Column {
visible: name === qsTr("Response: ") &&
(!currentResponse || !LLM.responseInProgress) && Network.isActive
anchors.right: parent.right
anchors.rightMargin: 20
anchors.top: parent.top
anchors.topMargin: 20
spacing: 10
Item {
width: childrenRect.width
height: childrenRect.height
Button {
id: thumbsUp
width: 30
height: 30
opacity: thumbsUpState || thumbsUpState == thumbsDownState ? 1.0 : 0.2
background: Image {
anchors.fill: parent
source: "qrc:/gpt4all-chat/icons/thumbs_up.svg"
}
onClicked: {
if (thumbsUpState && !thumbsDownState)
return
newResponse = ""
thumbsUpState = true
thumbsDownState = false
Network.sendConversation(getConversationJson());
}
}
Button {
id: thumbsDown
anchors.top: thumbsUp.top
anchors.topMargin: 10
anchors.left: thumbsUp.right
anchors.leftMargin: 2
width: 30
height: 30
checked: thumbsDownState
opacity: thumbsDownState || thumbsUpState == thumbsDownState ? 1.0 : 0.2
transform: [
Matrix4x4 {
matrix: Qt.matrix4x4(-1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1)
},
Translate {
x: thumbsDown.width
}
]
background: Image {
anchors.fill: parent
source: "qrc:/gpt4all-chat/icons/thumbs_down.svg"
}
onClicked: {
thumbsDownDialog.open()
}
}
}
}
}
property bool shouldAutoScroll: true
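A note on the thumbsDown transform above: the Matrix4x4 scales x by -1, which mirrors the icon about x = 0, and the Translate then shifts it right by thumbsDown.width, so the net mapping is x' = width - x (y unchanged) and the thumbs_down artwork is flipped horizontally within its own bounds.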
@@ -880,15 +1049,22 @@ Window {
}
leftPadding: 50
onClicked: {
- if (LLM.responseInProgress)
+ if (chatModel.count)
+ var listElement = chatModel.get(chatModel.count - 1)
+ if (LLM.responseInProgress) {
+ listElement.stopped = true
LLM.stopGenerating()
- else {
+ } else {
LLM.regenerateResponse()
if (chatModel.count) {
var listElement = chatModel.get(chatModel.count - 1)
if (listElement.name === qsTr("Response: ")) {
listElement.currentResponse = true
listElement.stopped = false
listElement.value = LLM.response
listElement.thumbsUpState = false
listElement.thumbsDownState = false
listElement.newResponse = ""
LLM.prompt(listElement.prompt, settings.promptTemplate, settings.maxLength,
settings.topK, settings.topP, settings.temperature,
settings.promptBatchSize)
@@ -956,10 +1132,14 @@ Window {
listElement.currentResponse = false
listElement.value = LLM.response
}
var prompt = textInput.text + "\n"
chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text})
chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt})
chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false,
"value": textInput.text})
chatModel.append({"id": chatModel.count, "name": qsTr("Response: "),
"currentResponse": true, "value": "", "stopped": false,
"thumbsUpState": false, "thumbsDownState": false,
"newResponse": "",
"prompt": prompt})
LLM.resetResponse()
LLM.prompt(prompt, settings.promptTemplate, settings.maxLength, settings.topK,
settings.topP, settings.temperature, settings.promptBatchSize)

@@ -0,0 +1,126 @@
#include "network.h"
#include "llm.h"
#include <QCoreApplication>
#include <QUuid>
#include <QJsonDocument>
#include <QJsonArray>
#include <QJsonObject>
#include <QSettings>
#include <QNetworkRequest>
//#define DEBUG
class MyNetwork: public Network { };
Q_GLOBAL_STATIC(MyNetwork, networkInstance)
Network *Network::globalInstance()
{
return networkInstance();
}
Network::Network()
: QObject{nullptr}
, m_isActive(false)
{
QSettings settings;
settings.sync();
m_isActive = settings.value("network/isActive", false).toBool();
m_uniqueId = settings.value("uniqueId", generateUniqueId()).toString();
settings.setValue("uniqueId", m_uniqueId);
settings.sync();
emit activeChanged();
}
void Network::setActive(bool b)
{
QSettings settings;
settings.setValue("network/isActive", b);
settings.sync();
m_isActive = b;
emit activeChanged();
}
QString Network::generateUniqueId() const
{
return QUuid::createUuid().toString(QUuid::WithoutBraces);
}
bool Network::packageAndSendJson(const QString &json)
{
if (!m_isActive)
return false;
QJsonParseError err;
QJsonDocument doc = QJsonDocument::fromJson(json.toUtf8(), &err);
if (err.error != QJsonParseError::NoError) {
qDebug() << "Couldn't parse: " << json << err.errorString();
return false;
}
Q_ASSERT(doc.isObject());
QJsonObject object = doc.object();
object.insert("source", "gpt4all-chat");
object.insert("agent_id", LLM::globalInstance()->modelName());
object.insert("submitter_id", m_uniqueId);
QSettings settings;
settings.sync();
QString attribution = settings.value("attribution", QString()).toString();
if (!attribution.isEmpty())
object.insert("attribution", attribution);
QJsonDocument newDoc;
newDoc.setObject(object);
#if defined(DEBUG)
printf("%s", qPrintable(newDoc.toJson(QJsonDocument::Indented)));
fflush(stdout);
#endif
QUrl jsonUrl("http://localhost/v1/ingest/chat");
QNetworkRequest request(jsonUrl);
QByteArray body(newDoc.toJson());
request.setHeader(QNetworkRequest::ContentTypeHeader, "application/json");
QNetworkReply *jsonReply = m_networkManager.post(request, body);
connect(jsonReply, &QNetworkReply::finished, this, &Network::handleJsonUploadFinished);
m_activeUploads.append(jsonReply);
return true;
}
void Network::handleJsonUploadFinished()
{
QNetworkReply *jsonReply = qobject_cast<QNetworkReply *>(sender());
if (!jsonReply)
return;
m_activeUploads.removeAll(jsonReply);
QVariant response = jsonReply->attribute(QNetworkRequest::HttpStatusCodeAttribute);
Q_ASSERT(response.isValid());
bool ok;
int code = response.toInt(&ok);
if (!ok)
qWarning() << "ERROR: Invalid response.";
if (code != 200)
qWarning() << "ERROR: response != 200 code:" << code;
QByteArray jsonData = jsonReply->readAll();
QJsonParseError err;
QJsonDocument document = QJsonDocument::fromJson(jsonData, &err);
if (err.error != QJsonParseError::NoError) {
qDebug() << "ERROR: Couldn't parse: " << jsonData << err.errorString();
return;
}
#if defined(DEBUG)
printf("%s", qPrintable(document.toJson(QJsonDocument::Indented)));
fflush(stdout);
#endif
jsonReply->deleteLater();
}
bool Network::sendConversation(const QString &conversation)
{
return packageAndSendJson(conversation);
}
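For orientation, packageAndSendJson() above takes the conversation JSON built in main.qml, inserts the envelope fields source, agent_id, submitter_id and, when set, attribution, and POSTs the result to the ingest endpoint. A hedged sketch of the call site and of the document that ends up on the wire; the conversation values are made up for illustration.

// Illustrative only: roughly what is POSTed to http://localhost/v1/ingest/chat
// once packageAndSendJson() has wrapped the conversation from getConversationJson():
//
//   {
//     "conversation": [
//       { "content": "...", "role": "user" },
//       { "content": "...", "role": "assistant", "rating": "positive" }
//     ],
//     "source": "gpt4all-chat",
//     "agent_id": "<LLM::globalInstance()->modelName()>",
//     "submitter_id": "<per-install QUuid stored in QSettings>",
//     "attribution": "<optional name entered in the NetworkDialog>"
//   }
//
#include "network.h"

static bool submitConversation(const QString &conversationJson)
{
    // conversationJson is the {"conversation": [...]} string produced in QML.
    return Network::globalInstance()->sendConversation(conversationJson);
}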

@@ -0,0 +1,43 @@
#ifndef NETWORK_H
#define NETWORK_H
#include <QObject>
#include <QNetworkAccessManager>
#include <QNetworkReply>
class Network : public QObject
{
Q_OBJECT
Q_PROPERTY(bool isActive READ isActive WRITE setActive NOTIFY activeChanged)
public:
static Network *globalInstance();
bool isActive() const { return m_isActive; }
void setActive(bool b);
Q_INVOKABLE QString generateUniqueId() const;
Q_INVOKABLE bool sendConversation(const QString &conversation);
Q_SIGNALS:
void activeChanged();
private Q_SLOTS:
void handleJsonUploadFinished();
private:
bool packageAndSendJson(const QString &json);
private:
bool m_isActive;
QString m_uniqueId;
QNetworkAccessManager m_networkManager;
QVector<QNetworkReply*> m_activeUploads;
private:
explicit Network();
~Network() {}
friend class MyNetwork;
};
#endif // NETWORK_H

@@ -0,0 +1,158 @@
import QtCore
import QtQuick
import QtQuick.Controls
import QtQuick.Layouts
import download
import network
import llm
Dialog {
id: networkDialog
anchors.centerIn: parent
modal: true
opacity: 0.9
padding: 20
width: 1024
height: column.height + dialogBox.height + 20
Settings {
id: settings
property string attribution: ""
}
Component.onDestruction: {
settings.sync()
}
Column {
id: column
spacing: 20
Item {
width: childrenRect.width
height: childrenRect.height
Image {
id: img
anchors.top: parent.top
anchors.left: parent.left
width: 60
height: 60
source: "qrc:/gpt4all-chat/icons/logo.svg"
}
Text {
anchors.left: img.right
anchors.leftMargin: 30
anchors.verticalCenter: img.verticalCenter
text: qsTr("Contribute data to the GPT4All Opensource Datalake.")
color: "#d1d5db"
}
}
ScrollView {
clip: true
height: 300
width: 1024 - 40
ScrollBar.vertical.policy: ScrollBar.AlwaysOn
ScrollBar.horizontal.policy: ScrollBar.AlwaysOff
TextArea {
id: textOptIn
wrapMode: Text.Wrap
width: 1024 - 40
padding: 20
text: qsTr("By enabling this feature, you will be able to participate in the democratic process of training a large language model by contributing data for future model improvements.
When a GPT4All model responds to you and you have opted-in, you can like/dislike its response. If you dislike a response, you can suggest an alternative response. This data will be collected and aggregated in the GPT4All Datalake.
NOTE: By turning on this feature, you will be sending your data to the GPT4All Open Source Datalake. You should have no expectation of chat privacy when this feature is enabled. You should, however, have an expectation of an optional attribution if you wish. Your chat data will be openly available for anyone to download and will be used by Nomic AI to improve future GPT4All models. Nomic AI will retain all attribution information attached to your data and you will be credited as a contributor to any GPT4All model release that uses your data!")
color: "#d1d5db"
focus: false
readOnly: true
Accessible.role: Accessible.Paragraph
Accessible.name: qsTr("Terms for opt-in")
Accessible.description: qsTr("Describes what will happen when you opt-in")
background: Rectangle {
color: "#343541"
radius: 10
}
}
}
TextField {
id: attribution
color: "#dadadc"
padding: 20
width: parent.width
text: settings.attribution
font.pixelSize: 24
placeholderText: qsTr("Please provide a name for attribution (optional)")
placeholderTextColor: "#7d7d8e"
background: Rectangle {
color: "#40414f"
radius: 10
}
Accessible.role: Accessible.EditableText
Accessible.name: qsTr("Attribution (optional)")
Accessible.description: qsTr("Textfield for providing attribution")
onEditingFinished: {
settings.attribution = attribution.text;
settings.sync();
}
}
}
background: Rectangle {
anchors.fill: parent
color: "#202123"
border.width: 1
border.color: "white"
radius: 10
}
footer: DialogButtonBox {
id: dialogBox
padding: 20
alignment: Qt.AlignRight
spacing: 10
Button {
text: qsTr("Enable")
background: Rectangle {
border.color: "#7d7d8e"
border.width: 1
radius: 10
color: "#343541"
}
Accessible.role: Accessible.Button
Accessible.name: text
Accessible.description: qsTr("Enable opt-in button")
padding: 15
DialogButtonBox.buttonRole: DialogButtonBox.AcceptRole
}
Button {
text: qsTr("Cancel")
background: Rectangle {
border.color: "#7d7d8e"
border.width: 1
radius: 10
color: "#343541"
}
Accessible.role: Accessible.Button
Accessible.name: text
Accessible.description: qsTr("Cancel opt-in button")
padding: 15
DialogButtonBox.buttonRole: DialogButtonBox.RejectRole
}
background: Rectangle {
color: "transparent"
}
}
onAccepted: {
Network.isActive = true;
}
onRejected: {
Network.isActive = false;
}
}

@@ -0,0 +1,102 @@
import QtCore
import QtQuick
import QtQuick.Controls
import QtQuick.Layouts
import download
import network
import llm
Dialog {
id: thumbsDownDialog
modal: true
opacity: 0.9
padding: 20
width: 900
property alias response: thumbsDownNewResponse.text
Column {
anchors.fill: parent
spacing: 20
Item {
width: childrenRect.width
height: childrenRect.height
Image {
id: img
anchors.top: parent.top
anchors.left: parent.left
width: 60
height: 60
source: "qrc:/gpt4all-chat/icons/thumbs_down.svg"
}
Text {
anchors.left: img.right
anchors.leftMargin: 30
anchors.verticalCenter: img.verticalCenter
text: qsTr("Provide feedback for negative rating")
color: "#d1d5db"
}
}
ScrollView {
clip: true
height: 300
width: parent.width
ScrollBar.vertical.policy: ScrollBar.AlwaysOn
ScrollBar.horizontal.policy: ScrollBar.AlwaysOff
TextArea {
id: thumbsDownNewResponse
color: "#dadadc"
padding: 20
width: parent.width
height: 300
wrapMode: Text.Wrap
font.pixelSize: 24
placeholderText: qsTr("Please provide a better response...")
placeholderTextColor: "#7d7d8e"
background: Rectangle {
color: "#40414f"
radius: 10
}
}
}
}
background: Rectangle {
anchors.fill: parent
color: "#202123"
border.width: 1
border.color: "white"
radius: 10
}
footer: DialogButtonBox {
padding: 20
alignment: Qt.AlignRight
spacing: 10
Button {
text: qsTr("Submit")
background: Rectangle {
border.color: "#7d7d8e"
border.width: 1
radius: 10
color: "#343541"
}
padding: 15
DialogButtonBox.buttonRole: DialogButtonBox.AcceptRole
}
Button {
text: qsTr("Cancel")
background: Rectangle {
border.color: "#7d7d8e"
border.width: 1
radius: 10
color: "#343541"
}
padding: 15
DialogButtonBox.buttonRole: DialogButtonBox.RejectRole
}
background: Rectangle {
color: "transparent"
}
}
}