Fix for issue #2080 where the GUI appears to hang when a chat with a large
model is deleted. There is no reason to save the context for a chat that
is being deleted.

Signed-off-by: Adam Treat <treat.adam@gmail.com>
Adam Treat
2024-03-06 12:59:34 -05:00
committed by AT
parent 44717682a7
commit 17dee02287
5 changed files with 14 additions and 1 deletion

@@ -64,6 +64,7 @@ ChatLLM::ChatLLM(Chat *parent, bool isServer)
     , m_isRecalc(false)
     , m_shouldBeLoaded(false)
     , m_forceUnloadModel(false)
+    , m_markedForDeletion(false)
     , m_shouldTrySwitchContext(false)
     , m_stopGenerating(false)
     , m_timer(nullptr)
@@ -690,7 +691,9 @@ void ChatLLM::unloadModel()
     else
         emit modelLoadingPercentageChanged(std::numeric_limits<float>::min()); // small non-zero positive value
-    saveState();
+    if (!m_markedForDeletion)
+        saveState();
 #if defined(DEBUG_MODEL_LOADING)
     qDebug() << "unloadModel" << m_llmThread.objectName() << m_llModelInfo.model;
 #endif
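
For readers unfamiliar with why skipping saveState() matters, here is a minimal, self-contained sketch of the pattern (plain C++ rather than the project's Qt-based ChatLLM; the ChatWorker class, the markForDeletion() setter, and the simulated delay in saveState() are illustrative assumptions, not the actual API): the owner flags a chat as doomed before unloading, so the unload skips the context serialization that is slow enough on large models to make the GUI appear to hang.

// Minimal sketch of the pattern behind this commit. Names such as
// ChatWorker, markForDeletion(), and the simulated cost of saveState()
// are assumptions for illustration only.
#include <chrono>
#include <iostream>
#include <thread>

class ChatWorker {
public:
    // Set by the owner right before the chat is deleted, so the
    // following unload can skip the expensive context serialization.
    void markForDeletion() { m_markedForDeletion = true; }

    void unloadModel() {
        // Serializing the context of a large model is slow enough to make
        // a GUI appear to hang; skip it when the chat is being deleted anyway.
        if (!m_markedForDeletion)
            saveState();
        std::cout << "model unloaded\n";
    }

private:
    void saveState() {
        std::cout << "serializing chat context (slow for large models)...\n";
        std::this_thread::sleep_for(std::chrono::milliseconds(100)); // stand-in for real work
    }

    bool m_markedForDeletion = false;
};

int main() {
    ChatWorker doomed;
    doomed.markForDeletion(); // chat is being deleted: no need to save its context
    doomed.unloadModel();     // returns quickly, saveState() is skipped

    ChatWorker kept;
    kept.unloadModel();       // a normal unload still saves the context
}

In the commit itself the flag is the new m_markedForDeletion member shown above; where it is set to true lies outside the hunks in this excerpt.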