diff --git a/gpt4all-chat/CHANGELOG.md b/gpt4all-chat/CHANGELOG.md
index 8d5b2716..d72df56e 100644
--- a/gpt4all-chat/CHANGELOG.md
+++ b/gpt4all-chat/CHANGELOG.md
@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
 
+## [Unreleased]
+
+### Fixed
+- Fix an incorrect value for currentResponse ([#3245](https://github.com/nomic-ai/gpt4all/pull/3245))
+
 ## [3.5.0] - 2024-12-09
 
 ### Changed
@@ -203,6 +208,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
 - Fix several Vulkan resource management issues ([#2694](https://github.com/nomic-ai/gpt4all/pull/2694))
 - Fix crash/hang when some models stop generating, by showing special tokens ([#2701](https://github.com/nomic-ai/gpt4all/pull/2701))
 
+[Unreleased]: https://github.com/nomic-ai/gpt4all/compare/v3.5.0...HEAD
 [3.5.0]: https://github.com/nomic-ai/gpt4all/compare/v3.5.0-rc2...v3.5.0
 [3.5.0-rc2]: https://github.com/nomic-ai/gpt4all/compare/v3.5.0-rc1...v3.5.0-rc2
 [3.5.0-rc1]: https://github.com/nomic-ai/gpt4all/compare/v3.4.2...v3.5.0-rc1
diff --git a/gpt4all-chat/src/chatmodel.h b/gpt4all-chat/src/chatmodel.h
index 324de5b6..1ac5843c 100644
--- a/gpt4all-chat/src/chatmodel.h
+++ b/gpt4all-chat/src/chatmodel.h
@@ -193,9 +193,6 @@ public:
         NameRole = Qt::UserRole + 1,
         ValueRole,
 
-        // prompts and responses
-        PeerRole,
-
         // prompts
         PromptAttachmentsRole,
 
@@ -266,18 +263,6 @@ public:
                 return item->name;
             case ValueRole:
                 return item->value;
-            case PeerRole:
-                switch (item->type()) {
-                    using enum ChatItem::Type;
-                    case Prompt:
-                    case Response:
-                        {
-                            auto peer = getPeerUnlocked(item);
-                            return peer ? QVariant::fromValue(**peer) : QVariant::fromValue(nullptr);
-                        }
-                    default:
-                        return QVariant();
-                }
             case PromptAttachmentsRole:
                 return QVariant::fromValue(item->promptAttachments);
             case SourcesRole:
@@ -320,7 +305,6 @@ public:
         return {
             { NameRole, "name" },
            { ValueRole, "value" },
-            { PeerRole, "peer" },
             { PromptAttachmentsRole, "promptAttachments" },
             { SourcesRole, "sources" },
             { ConsolidatedSourcesRole, "consolidatedSources" },
@@ -362,18 +346,13 @@ public:
             count = m_chatItems.count();
         }
 
-        int promptIndex = 0;
         beginInsertRows(QModelIndex(), count, count);
         {
             QMutexLocker locker(&m_mutex);
-            m_chatItems.emplace_back(ChatItem::response_tag, promptIndex);
-            if (auto pi = getPeerUnlocked(m_chatItems.size() - 1))
-                promptIndex = *pi;
+            m_chatItems.emplace_back(ChatItem::response_tag);
         }
         endInsertRows();
         emit countChanged();
-        if (promptIndex >= 0)
-            emit dataChanged(createIndex(promptIndex, 0), createIndex(promptIndex, 0), {PeerRole});
     }
 
     // Used by Server to append a new conversation to the chat log.
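
For context, the chatmodel.h hunks follow the standard QAbstractListModel contract: a custom role is defined in three places that must stay in sync (the role enum, the `data()` switch, and the `roleNames()` map), and row insertions are wrapped in `beginInsertRows()`/`endInsertRows()` so attached views update. The sketch below is not from this PR; the class and member names (`ItemListModel`, `Item`, `append`) are hypothetical and only illustrate that pattern.

```cpp
// Minimal sketch (not from the PR): the QAbstractListModel pattern the
// chatmodel.h change follows. ItemListModel and Item are hypothetical names.
#include <QAbstractListModel>
#include <QString>
#include <QVector>
#include <utility>

class ItemListModel : public QAbstractListModel
{
    Q_OBJECT
public:
    // A custom role lives in three places that must stay in sync:
    // this enum, the data() switch, and the roleNames() map.
    enum Roles {
        NameRole = Qt::UserRole + 1,
        ValueRole,
    };

    int rowCount(const QModelIndex &parent = {}) const override
    {
        return parent.isValid() ? 0 : int(m_items.count());
    }

    QVariant data(const QModelIndex &index, int role) const override
    {
        if (!index.isValid() || index.row() >= m_items.count())
            return {};
        const auto &item = m_items[index.row()];
        switch (role) {
        case NameRole:  return item.name;
        case ValueRole: return item.value;
        default:        return {};
        }
    }

    QHash<int, QByteArray> roleNames() const override
    {
        return {
            { NameRole,  "name"  },
            { ValueRole, "value" },
        };
    }

    // Appending a row: wrap the container mutation in begin/endInsertRows()
    // so attached views (e.g. a QML ListView) pick up the new item.
    void append(QString name, QString value)
    {
        const int row = int(m_items.count());
        beginInsertRows(QModelIndex(), row, row);
        m_items.append({std::move(name), std::move(value)});
        endInsertRows();
    }

private:
    struct Item { QString name; QString value; };
    QVector<Item> m_items;
};
```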