Provide a busy indicator if we're processing a long prompt and make the
stop button work in the middle of processing a long prompt as well.
commit a06fd8a487
parent 54cd0920f1
gptj.cpp (3 changed lines)

@@ -707,6 +707,9 @@ void GPTJ::prompt(const std::string &prompt, std::function<bool(const std::string
             std::cerr << "GPT-J ERROR: Failed to process prompt\n";
             return;
         }
+        // We pass a null string to see if the user has asked us to stop...
+        if (!response(""))
+            return;
         ctx.n_past += batch.size();
         i = batch_end;
     }
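The gptj.cpp hunk above is the core of the stop-button half of this change: the prompt is evaluated in batches, and between batches the response callback is invoked with an empty string purely as a cancellation probe. A minimal standalone sketch of that pattern (processPrompt, Token, and batchSize are illustrative names, not taken from the repository):

// Sketch only: evaluate the prompt in batches and poll the response callback
// with an empty string between batches so the caller can abort early.
#include <algorithm>
#include <functional>
#include <string>
#include <vector>

using Token = int; // stand-in for the real vocab id type

void processPrompt(const std::vector<Token> &tokens, size_t batchSize,
                   const std::function<bool(const std::string &)> &response)
{
    size_t i = 0;
    while (i < tokens.size()) {
        size_t batchEnd = std::min(i + batchSize, tokens.size());
        // ... feed tokens[i, batchEnd) to the model here ...

        // Pass a null string to see if the user has asked us to stop.
        if (!response(""))
            return;
        i = batchEnd;
    }
}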
llm.cpp (7 changed lines)

@@ -65,6 +65,7 @@ bool GPTJObject::isModelLoaded() const
 void GPTJObject::resetResponse()
 {
     m_response = std::string();
+    emit responseChanged();
 }
 
 void GPTJObject::resetContext()
@@ -88,8 +89,10 @@ bool GPTJObject::handleResponse(const std::string &response)
     printf("%s", response.c_str());
     fflush(stdout);
 #endif
-    m_response.append(response);
-    emit responseChanged();
+    if (!response.empty()) {
+        m_response.append(response);
+        emit responseChanged();
+    }
     return !m_stopGenerating;
 }
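On the Qt side, handleResponse now appends only non-empty chunks but still returns !m_stopGenerating for every callback, including the empty-string probes, which is what lets the stop button interrupt a long prompt. A rough sketch of how the pieces fit together; the stopGenerating() slot and the class skeleton are assumptions for illustration, only m_response, m_stopGenerating, and handleResponse come from the diff:

#include <string>

// Sketch only: how a stop request propagates back into the prompt loop.
class GPTJObject {
public:
    // Hypothetical slot the UI's stop button would trigger (name assumed).
    void stopGenerating() { m_stopGenerating = true; }

    // Mirrors the handleResponse change above: empty probe strings are not
    // appended, but the return value still reports whether to keep going.
    bool handleResponse(const std::string &response)
    {
        if (!response.empty()) {
            m_response.append(response);
            // emit responseChanged();  // Qt signal in the real code
        }
        return !m_stopGenerating;
    }

private:
    std::string m_response;
    bool m_stopGenerating = false;
};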
main.qml (11 changed lines)

@@ -310,7 +310,7 @@ Window {
             focus: false
             padding: 20
             font.pixelSize: 24
-            cursorVisible: currentResponse ? LLM.responseInProgress : false
+            cursorVisible: currentResponse ? (LLM.response !== "" ? LLM.responseInProgress : false) : false
             cursorPosition: text.length
             background: Rectangle {
                 color: name === qsTr("Response: ") ? "#444654" : "#343541"
@@ -318,6 +318,15 @@ Window {
 
                 leftPadding: 100
 
+                BusyIndicator {
+                    anchors.left: parent.left
+                    anchors.leftMargin: 90
+                    anchors.top: parent.top
+                    anchors.topMargin: 5
+                    visible: currentResponse && LLM.response === "" && LLM.responseInProgress
+                    running: currentResponse && LLM.response === "" && LLM.responseInProgress
+                }
+
                 Rectangle {
                     anchors.left: parent.left
                     anchors.top: parent.top