chatllm: do not report 100% progress until actually complete

Signed-off-by: Jared Van Bortel <jared@nomic.ai>
This commit is contained in:
Jared Van Bortel 2024-05-30 16:33:45 -04:00
parent cff5a53718
commit a16df5d261

View File

@ -374,6 +374,7 @@ bool ChatLLM::loadModel(const ModelInfo &modelInfo)
m_llModelInfo.model->setProgressCallback([this](float progress) -> bool {
    progress = std::max(progress, std::numeric_limits<float>::min()); // keep progress above zero
    progress = std::min(progress, std::nextafter(1.0f, 0.0f)); // keep progress below 100% until we are actually done
    emit modelLoadingPercentageChanged(progress);
    return m_shouldBeLoaded;
});