Mirror of https://github.com/nomic-ai/gpt4all
Add multi-line prompt support.

commit cd5f525950
parent 4c970fdc9c
main.qml · 115 changed lines
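
What the change does: the single-line TextField used for prompts is replaced by a ScrollView wrapping a TextArea. A plain Return press submits the prompt by emitting editingFinished(), while Ctrl+Return or Shift+Return leaves the key event unaccepted so the TextArea inserts a newline, and the input area grows with its content up to a 200 px cap. The sketch below is a minimal, self-contained illustration of that input pattern only; the window, the ids, and the console.log submit handler are illustrative stand-ins for gpt4all's LLM/chatModel/settingsDialog plumbing, not the project's code.

    // multiline_prompt_sketch.qml — illustrative stand-in, not part of gpt4all.
    // Plain Return submits; Ctrl+Return or Shift+Return inserts a newline.
    import QtQuick 2.15
    import QtQuick.Controls 2.15
    import QtQuick.Window 2.15

    Window {
        width: 640
        height: 480
        visible: true

        ScrollView {
            id: inputView                            // mirrors textInputView in the diff
            anchors.left: parent.left
            anchors.right: parent.right
            anchors.bottom: parent.bottom
            anchors.margins: 30
            height: Math.min(contentHeight, 200)     // grow with the text, capped at 200 px

            TextArea {
                id: input
                wrapMode: TextArea.Wrap
                placeholderText: qsTr("Send a message...")

                Keys.onReturnPressed: (event) => {
                    if (event.modifiers & Qt.ControlModifier || event.modifiers & Qt.ShiftModifier)
                        event.accepted = false       // let the TextArea insert the newline
                    else
                        editingFinished()            // plain Return: emit the submit signal
                }

                onEditingFinished: {
                    if (text === "")
                        return
                    console.log("submit prompt:", text)   // stand-in for the LLM/chatModel calls
                    text = ""
                }
            }
        }
    }

Run it with the qml runtime (e.g. qml multiline_prompt_sketch.qml). Note that in Qt Quick, TextEdit's editingFinished is also emitted when the editor loses focus, so a real handler may want to guard against submitting twice.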
@@ -755,7 +755,7 @@ Window {
             anchors.left: parent.left
             anchors.right: parent.right
             anchors.top: parent.top
-            anchors.bottom: textInput.top
+            anchors.bottom: textInputView.top
             anchors.bottomMargin: 30
             ScrollBar.vertical.policy: ScrollBar.AlwaysOn

@@ -886,8 +886,8 @@ Window {
                     }
                 }
             }
-            anchors.bottom: textInput.top
-            anchors.horizontalCenter: textInput.horizontalCenter
+            anchors.bottom: textInputView.top
+            anchors.horizontalCenter: textInputView.horizontalCenter
             anchors.bottomMargin: 40
             padding: 15
             contentItem: Text {
@@ -906,65 +906,76 @@ Window {
             }
         }

-        TextField {
-            id: textInput
+        ScrollView {
+            id: textInputView
             anchors.left: parent.left
             anchors.right: parent.right
             anchors.bottom: parent.bottom
             anchors.margins: 30
-            color: "#dadadc"
-            padding: 20
-            enabled: LLM.isModelLoaded
-            font.pixelSize: 24
-            placeholderText: qsTr("Send a message...")
-            placeholderTextColor: "#7d7d8e"
-            background: Rectangle {
-                color: "#40414f"
-                radius: 10
-            }
-            Accessible.role: Accessible.EditableText
-            Accessible.name: placeholderText
-            Accessible.description: qsTr("Textfield for sending messages/prompts to the model")
-            onAccepted: {
-                if (textInput.text === "")
-                    return
-
-                LLM.stopGenerating()
-
-                if (chatModel.count) {
-                    var listElement = chatModel.get(chatModel.count - 1)
-                    listElement.currentResponse = false
-                    listElement.value = LLM.response
-                }
-
-                var prompt = textInput.text + "\n"
-                chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text})
-                chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt})
-                LLM.resetResponse()
-                LLM.prompt(prompt, settingsDialog.promptTemplate, settingsDialog.maxLength, settingsDialog.topK,
-                           settingsDialog.topP, settingsDialog.temperature, settingsDialog.promptBatchSize)
-                textInput.text = ""
-            }
-        }
-
-        Button {
-            anchors.right: textInput.right
-            anchors.verticalCenter: textInput.verticalCenter
-            anchors.rightMargin: 15
-            width: 30
-            height: 30
-
-            background: Image {
-                anchors.centerIn: parent
-                source: "qrc:/gpt4all-chat/icons/send_message.svg"
-            }
-
-            Accessible.role: Accessible.Button
-            Accessible.name: qsTr("Send the message button")
-            Accessible.description: qsTr("Sends the message/prompt contained in textfield to the model")
-
-            onClicked: {
-                textInput.accepted()
+            height: Math.min(contentHeight, 200)
+
+            TextArea {
+                id: textInput
+                color: "#dadadc"
+                padding: 20
+                enabled: LLM.isModelLoaded
+                font.pixelSize: 24
+                placeholderText: qsTr("Send a message...")
+                placeholderTextColor: "#7d7d8e"
+                background: Rectangle {
+                    color: "#40414f"
+                    radius: 10
+                }
+                Accessible.role: Accessible.EditableText
+                Accessible.name: placeholderText
+                Accessible.description: qsTr("Textfield for sending messages/prompts to the model")
+                Keys.onReturnPressed: {
+                    if (event.modifiers & Qt.ControlModifier || event.modifiers & Qt.ShiftModifier)
+                        event.accepted = false;
+                    else
+                        editingFinished();
+                }
+                onEditingFinished: {
+                    if (textInput.text === "")
+                        return
+
+                    LLM.stopGenerating()
+
+                    if (chatModel.count) {
+                        var listElement = chatModel.get(chatModel.count - 1)
+                        listElement.currentResponse = false
+                        listElement.value = LLM.response
+                    }
+
+                    var prompt = textInput.text + "\n"
+                    chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text})
+                    chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt})
+                    LLM.resetResponse()
+                    LLM.prompt(prompt, settingsDialog.promptTemplate, settingsDialog.maxLength, settingsDialog.topK,
+                               settingsDialog.topP, settingsDialog.temperature, settingsDialog.promptBatchSize)
+                    textInput.text = ""
+                }
+            }
+        }
+
+        Button {
+            anchors.right: textInputView.right
+            anchors.verticalCenter: textInputView.verticalCenter
+            anchors.rightMargin: 15
+            width: 30
+            height: 30
+
+            background: Image {
+                anchors.centerIn: parent
+                source: "qrc:/gpt4all-chat/icons/send_message.svg"
+            }
+
+            Accessible.role: Accessible.Button
+            Accessible.name: qsTr("Send the message button")
+            Accessible.description: qsTr("Sends the message/prompt contained in textfield to the model")
+
+            onClicked: {
+                textInput.accepted()
             }
         }
     }