For now, add a reset-context feature that clears the chat history and the prompt context.

This commit is contained in:
Adam Treat
2023-04-10 17:13:22 -04:00
parent bbc00a5ea8
commit b1b7744241
4 changed files with 43 additions and 4 deletions

16
llm.cpp
View File

@@ -13,6 +13,8 @@ LLM *LLM::globalInstance()
return llmInstance();
}
static GPTJ::PromptContext s_ctx;
GPTJObject::GPTJObject()
: QObject{nullptr}
, m_gptj(new GPTJ)
@@ -51,6 +53,11 @@ void GPTJObject::resetResponse()
m_response = std::string();
}
// Clears the conversation context: replaces the file-static s_ctx (the
// PromptContext handed to m_gptj->prompt()) with a value-initialized one,
// so the next prompt starts from an empty history.
// NOTE(review): s_ctx is shared at file scope, not per-instance — presumably
// there is only one GPTJObject; verify if multiple instances are ever created.
void GPTJObject::resetContext()
{
s_ctx = GPTJ::PromptContext();
}
QString GPTJObject::response() const
{
return QString::fromStdString(m_response);
@@ -75,8 +82,7 @@ bool GPTJObject::prompt(const QString &prompt)
m_stopGenerating = false;
auto func = std::bind(&GPTJObject::handleResponse, this, std::placeholders::_1);
emit responseStarted();
static GPTJ::PromptContext ctx;
m_gptj->prompt(prompt.toStdString(), func, ctx, 4096 /*number of chars to predict*/);
m_gptj->prompt(prompt.toStdString(), func, s_ctx, 4096 /*number of chars to predict*/);
emit responseStopped();
return true;
}
@@ -93,6 +99,7 @@ LLM::LLM()
connect(this, &LLM::promptRequested, m_gptj, &GPTJObject::prompt, Qt::QueuedConnection);
connect(this, &LLM::resetResponseRequested, m_gptj, &GPTJObject::resetResponse, Qt::BlockingQueuedConnection);
connect(this, &LLM::resetContextRequested, m_gptj, &GPTJObject::resetContext, Qt::BlockingQueuedConnection);
}
bool LLM::isModelLoaded() const
@@ -110,6 +117,11 @@ void LLM::resetResponse()
emit resetResponseRequested(); // blocking queued connection
}
// Public entry point for resetting the chat context. Forwards the request to
// the GPTJObject worker through the resetContextRequested signal, which is
// connected with Qt::BlockingQueuedConnection (see LLM::LLM), so this call
// blocks until the worker has actually cleared the context.
void LLM::resetContext()
{
emit resetContextRequested(); // blocking queued connection
}
void LLM::stopGenerating()
{
m_gptj->stopGenerating();