Mirror of https://github.com/nomic-ai/gpt4all (synced 2024-11-18 03:25:46 +00:00)
Fix for stale references after we regenerate.
commit 28944ac01b
parent aea94f756d
@@ -96,10 +96,22 @@ bool Chat::isModelLoaded() const
     return m_llmodel->isModelLoaded();
 }
 
+void Chat::resetResponseState()
+{
+    if (m_responseInProgress && m_responseState == Chat::LocalDocsRetrieval)
+        return;
+
+    m_responseInProgress = true;
+    m_responseState = Chat::LocalDocsRetrieval;
+    emit responseInProgressChanged();
+    emit responseStateChanged();
+}
+
 void Chat::prompt(const QString &prompt, const QString &prompt_template, int32_t n_predict,
     int32_t top_k, float top_p, float temp, int32_t n_batch, float repeat_penalty,
     int32_t repeat_penalty_tokens)
 {
+    resetResponseState();
     emit promptRequested(
         prompt,
         prompt_template,
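With this change, prompt(), newPromptResponsePair() and serverNewPromptResponsePair() all funnel their bookkeeping through the new resetResponseState() helper, and its early-return guard makes the reset idempotent: once a LocalDocs retrieval is already flagged as in progress, a later call emits nothing. Below is a Qt-free sketch of just that guard; only Chat::LocalDocsRetrieval and the member names come from the diff, the rest is placeholder, and the signal emissions become counters so the behaviour can be checked from main().

// Qt-free sketch of the guard in Chat::resetResponseState(); not gpt4all code.
#include <cassert>

struct MiniChat {
    // Only LocalDocsRetrieval appears in the diff; Idle is a stand-in for the
    // other states of the real response-state enum.
    enum ResponseState { Idle, LocalDocsRetrieval };

    bool responseInProgress = false;
    ResponseState responseState = Idle;
    int inProgressNotifications = 0; // stands in for emit responseInProgressChanged()
    int stateNotifications = 0;      // stands in for emit responseStateChanged()

    void resetResponseState() {
        // Same early return as the diffed code: if a LocalDocs retrieval is
        // already marked in progress, there is nothing to reset or announce.
        if (responseInProgress && responseState == LocalDocsRetrieval)
            return;

        responseInProgress = true;
        responseState = LocalDocsRetrieval;
        ++inProgressNotifications;
        ++stateNotifications;
    }
};

int main() {
    MiniChat chat;
    chat.resetResponseState(); // first caller for this turn flips the state
    chat.resetResponseState(); // a second caller right afterwards is a no-op
    assert(chat.inProgressNotifications == 1 && chat.stateNotifications == 1);
    return 0;
}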
@@ -115,6 +127,8 @@ void Chat::prompt(const QString &prompt, const QString &prompt_template, int32_t
 
 void Chat::regenerateResponse()
 {
+    const int index = m_chatModel->count() - 1;
+    m_chatModel->updateReferences(index, QString(), QList<QString>());
     emit regenerateResponseRequested();
 }
 
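The updateReferences(index, QString(), QList<QString>()) call is the heart of the fix: it blanks the previous response's LocalDocs citations before the backend is asked to regenerate, so the old references cannot linger next to the freshly generated text. ChatModel itself is not touched by this commit, so the following is only a sketch, with an invented MiniChatModel and invented role names, of what a list-model setter with that signature could plausibly look like.

// Hypothetical, much-reduced stand-in for ChatModel (not part of this commit):
// one row per chat item, with invented role names. It only models the pieces a
// setter with updateReferences()'s signature would need to touch.
#include <QAbstractListModel>
#include <QStringList>
#include <QDebug>

struct ChatItem {
    QString value;                    // prompt or response text
    QString references;               // formatted LocalDocs citations shown with the response
    QList<QString> referencesContext; // raw retrieved snippets backing those citations
};

class MiniChatModel : public QAbstractListModel {
public:
    enum Roles { ValueRole = Qt::UserRole + 1, ReferencesRole, ReferencesContextRole };

    int rowCount(const QModelIndex & = QModelIndex()) const override { return int(m_items.size()); }
    int count() const { return int(m_items.size()); }

    QVariant data(const QModelIndex &index, int role) const override {
        if (!index.isValid() || index.row() >= int(m_items.size()))
            return QVariant();
        const ChatItem &item = m_items.at(index.row());
        switch (role) {
        case ValueRole:             return item.value;
        case ReferencesRole:        return item.references;
        case ReferencesContextRole: return QVariant(QStringList(item.referencesContext));
        default:                    return QVariant();
        }
    }

    void append(const ChatItem &item) {
        const int row = int(m_items.size());
        beginInsertRows(QModelIndex(), row, row);
        m_items.append(item);
        endInsertRows();
    }

    // Overwriting both fields and announcing the change for the reference roles
    // is what lets the view drop the stale citations immediately.
    void updateReferences(int row, const QString &references, const QList<QString> &context) {
        ChatItem &item = m_items[row];
        item.references = references;
        item.referencesContext = context;
        emit dataChanged(index(row), index(row), {ReferencesRole, ReferencesContextRole});
    }

private:
    QList<ChatItem> m_items;
};

int main() {
    MiniChatModel model;
    model.append({QStringLiteral("old response"),
                  QStringLiteral("[1] somedoc.pdf"),
                  {QStringLiteral("retrieved snippet")}});

    // Mirrors the new Chat::regenerateResponse(): blank the references on the
    // last item before the backend is asked to regenerate.
    const int index = model.count() - 1;
    model.updateReferences(index, QString(), QList<QString>());

    qDebug() << model.data(model.index(index), MiniChatModel::ReferencesRole); // now empty
    return 0;
}

In this sketch the dataChanged() emission scoped to the two reference roles is what clears the stale citations from the delegates right away; the real ChatModel may organise this differently.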
@@ -234,10 +248,7 @@ void Chat::setModelName(const QString &modelName)
 
 void Chat::newPromptResponsePair(const QString &prompt)
 {
-    m_responseInProgress = true;
-    m_responseState = Chat::LocalDocsRetrieval;
-    emit responseInProgressChanged();
-    emit responseStateChanged();
+    resetResponseState();
     m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
     m_chatModel->appendPrompt(tr("Prompt: "), prompt);
     m_chatModel->appendResponse(tr("Response: "), prompt);
@@ -246,11 +257,8 @@ void Chat::newPromptResponsePair(const QString &prompt)
 
 void Chat::serverNewPromptResponsePair(const QString &prompt)
 {
-    m_responseInProgress = true;
-    m_responseState = Chat::LocalDocsRetrieval;
-    emit responseInProgressChanged();
-    emit responseStateChanged();
-    m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
+    resetResponseState();
+    m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
     m_chatModel->appendPrompt(tr("Prompt: "), prompt);
     m_chatModel->appendResponse(tr("Response: "), prompt);
 }
@@ -87,6 +87,7 @@ public:
     Q_INVOKABLE bool hasCollection(const QString &collection) const;
     Q_INVOKABLE void addCollection(const QString &collection);
     Q_INVOKABLE void removeCollection(const QString &collection);
+    void resetResponseState();
 
 public Q_SLOTS:
     void serverNewPromptResponsePair(const QString &prompt);