Naive version of chat context, but slow.

Branch: pull/520/head
Author: Adam Treat
Parent: 596592ce12
Commit: 91a2602d93

@@ -205,10 +205,18 @@ Window {
                        listElement.currentResponse = false
                        listElement.value = LLM.response
                    }
                    var prompt = textInput.text + "\n"
                    chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text})
                    chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt})
//                    var contextPrompt = ""
//                    for (var i = 0; i < chatModel.count; ++i) {
//                        var listElement = chatModel.get(i)
//                        contextPrompt += listElement.value + "\n";
//                    }
//                    prompt = contextPrompt + textInput.text + "\n"
                    LLM.resetResponse()
                    LLM.prompt(prompt)
                    textInput.text = ""
