diff --git a/gpt4all-chat/CHANGELOG.md b/gpt4all-chat/CHANGELOG.md
index 3a455d10..6478ea72 100644
--- a/gpt4all-chat/CHANGELOG.md
+++ b/gpt4all-chat/CHANGELOG.md
@@ -10,6 +10,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
 - Fix API server ignoring assistant messages in history after v3.5.0 ([#3256](https://github.com/nomic-ai/gpt4all/pull/3256))
 - Fix API server replying with incorrect token counts and stop reason after v3.5.0 ([#3256](https://github.com/nomic-ai/gpt4all/pull/3256))
 - Fix API server remembering previous, unrelated conversations after v3.5.0 ([#3256](https://github.com/nomic-ai/gpt4all/pull/3256))
+- Fix mishandling of default chat template and system message of cloned models in v3.5.0 ([#3262](https://github.com/nomic-ai/gpt4all/pull/3262))
 
 ## [3.5.1] - 2024-12-10
 
diff --git a/gpt4all-chat/src/modellist.cpp b/gpt4all-chat/src/modellist.cpp
index d24ef859..1f2c4e35 100644
--- a/gpt4all-chat/src/modellist.cpp
+++ b/gpt4all-chat/src/modellist.cpp
@@ -1141,18 +1141,20 @@ bool ModelList::isUniqueName(const QString &name) const
 
 QString ModelList::clone(const ModelInfo &model)
 {
+    auto *mySettings = MySettings::globalInstance();
+
     const QString id = Network::globalInstance()->generateUniqueId();
     addModel(id);
 
-    QString chatTemplate, systemMessage;
+    QString tmplSetting, sysmsgSetting;
     if (auto tmpl = model.chatTemplate().asModern()) {
-        chatTemplate = *tmpl;
+        tmplSetting = *tmpl;
     } else {
         qWarning("ModelList Warning: attempted to clone model with legacy chat template");
         return {};
     }
     if (auto msg = model.systemMessage().asModern()) {
-        systemMessage = *msg;
+        sysmsgSetting = *msg;
     } else {
         qWarning("ModelList Warning: attempted to clone model with legacy system message");
         return {};
@@ -1177,12 +1179,22 @@ QString ModelList::clone(const ModelInfo &model)
         { ModelList::GpuLayersRole, model.gpuLayers() },
         { ModelList::RepeatPenaltyRole, model.repeatPenalty() },
         { ModelList::RepeatPenaltyTokensRole, model.repeatPenaltyTokens() },
-        { ModelList::ChatTemplateRole, chatTemplate },
-        { ModelList::SystemMessageRole, systemMessage },
+        { ModelList::SystemMessageRole, model.m_systemMessage },
         { ModelList::ChatNamePromptRole, model.chatNamePrompt() },
         { ModelList::SuggestedFollowUpPromptRole, model.suggestedFollowUpPrompt() },
     };
+    if (auto tmpl = model.m_chatTemplate)
+        data.emplace_back(ModelList::ChatTemplateRole, *tmpl); // copy default chat template, if known
     updateData(id, data);
 
+    // Ensure setting overrides are copied in case the base model overrides change.
+    // This is necessary because setting these roles on ModelInfo above does not write to settings.
+    auto cloneInfo = modelInfo(id);
+    if (mySettings->isModelChatTemplateSet (model))
+        mySettings->setModelChatTemplate (cloneInfo, tmplSetting );
+    if (mySettings->isModelSystemMessageSet(model))
+        mySettings->setModelSystemMessage(cloneInfo, sysmsgSetting);
+
     return id;
 }
 
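The behavioral change in the patch is that a clone now carries the base model's built-in default chat template separately from any user-set overrides, and overrides are written through MySettings only when the base model actually has one set, so the clone otherwise keeps tracking its own defaults. Below is a minimal, self-contained sketch of that copy-only-if-set pattern; the types and function names (`Model`, `cloneModel`) are hypothetical illustrations, not GPT4All's actual API.

```cpp
#include <iostream>
#include <optional>
#include <string>

// Hypothetical simplified model record: a built-in default plus optional per-model overrides.
struct Model {
    std::string defaultChatTemplate;          // default shipped with the model metadata
    std::optional<std::string> chatTemplate;  // explicit override, if the user set one
    std::optional<std::string> systemMessage; // explicit override, if the user set one
};

// Mirrors the idea in the patch: the default is always copied (if known), but an
// override is copied only when the source model actually has one set, analogous to
// guarding setModelChatTemplate/setModelSystemMessage with isModel*Set checks.
Model cloneModel(const Model &base)
{
    Model clone;
    clone.defaultChatTemplate = base.defaultChatTemplate; // copy default chat template, if known

    if (base.chatTemplate)
        clone.chatTemplate = *base.chatTemplate;   // copy override only when it exists
    if (base.systemMessage)
        clone.systemMessage = *base.systemMessage; // copy override only when it exists
    return clone;
}

int main()
{
    Model base{"<default template>", std::nullopt, std::string("You are a helpful assistant.")};
    Model clone = cloneModel(base);

    std::cout << "template override copied: " << (clone.chatTemplate ? "yes" : "no") << '\n'
              << "system message override copied: " << (clone.systemMessage ? "yes" : "no") << '\n';
}
```

In this sketch, as in the patch, a later change to the base model's default does not silently freeze into the clone, because only explicit overrides are persisted on the cloned entry.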