Naive (and slow) first version of chat context.

This commit is contained in:
Adam Treat 2023-04-09 12:59:58 -04:00
parent b0817a2582
commit 9c0b216e7c

View File

@ -205,10 +205,18 @@ Window {
listElement.currentResponse = false
listElement.value = LLM.response
}
var prompt = textInput.text + "\n"
chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text})
chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt})
// var contextPrompt = ""
// for (var i = 0; i < chatModel.count; ++i) {
// var listElement = chatModel.get(i)
// contextPrompt += listElement.value + "\n";
// }
// prompt = contextPrompt + textInput.text + "\n"
LLM.resetResponse()
LLM.prompt(prompt)
textInput.text = ""