Handle the forwarding of important signals from the LLM object so QML doesn't have to deal with which chat is current.
This commit is contained in:
parent 8b94a23253
commit 482f543675
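Note on the approach: Qt lets a signal be connected directly to another signal, so the LLM object can simply re-emit whatever the current Chat emits, and QML only ever has to target LLM. Below is a minimal, self-contained sketch of that forwarding pattern using simplified stand-in Chat and LLM classes (not the project's real headers); it assumes a single main.cpp built with Qt's AUTOMOC.

// Minimal sketch of the forwarding pattern this commit introduces.
// Chat and LLM here are simplified stand-ins, not the project's real classes.
// Assumes this file is main.cpp in a Qt project built with AUTOMOC enabled.
#include <QCoreApplication>
#include <QDebug>
#include <QObject>
#include <QTimer>

class Chat : public QObject {
    Q_OBJECT
public:
    using QObject::QObject;
    bool isRecalc() const { return m_recalc; }
    void setRecalc(bool r) { m_recalc = r; emit recalcChanged(); }
Q_SIGNALS:
    void recalcChanged();
    void responseChanged();
private:
    bool m_recalc = false;
};

class LLM : public QObject {
    Q_OBJECT
    Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)
public:
    explicit LLM(Chat *chat, QObject *parent = nullptr)
        : QObject(parent), m_currentChat(chat)
    {
        // Signal-to-signal connections: when the current chat emits a signal,
        // LLM re-emits it under its own name, so QML can target LLM directly
        // instead of tracking which chat is current.
        connect(m_currentChat, &Chat::recalcChanged,
                this, &LLM::recalcChanged, Qt::QueuedConnection);
        connect(m_currentChat, &Chat::responseChanged,
                this, &LLM::responseChanged, Qt::QueuedConnection);
    }
    // Read-through accessor: the state still lives on the chat object.
    bool isRecalc() const { return m_currentChat->isRecalc(); }
Q_SIGNALS:
    void recalcChanged();
    void responseChanged();
private:
    Chat *m_currentChat;
};

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);
    Chat chat;
    LLM llm(&chat);
    QObject::connect(&llm, &LLM::recalcChanged, &llm, [&llm] {
        qDebug() << "forwarded recalcChanged, isRecalc =" << llm.isRecalc();
    });
    chat.setRecalc(true);                     // delivered once the event loop runs
    QTimer::singleShot(0, &app, &QCoreApplication::quit);
    return app.exec();
}

#include "main.moc"

Because the connections use Qt::QueuedConnection, as in the diff below, the forwarded signals are delivered through the receiver's event loop rather than synchronously.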
llm.cpp (14 changed lines)

@@ -25,13 +25,18 @@ LLM::LLM()
 {
     connect(Download::globalInstance(), &Download::modelListChanged,
         this, &LLM::modelListChanged, Qt::QueuedConnection);
-    // FIXME: This should be moved to connect whenever we make a new chat object in future
+    // FIXME: These should be moved to connect whenever we make a new chat object in future
     connect(m_currentChat, &Chat::modelNameChanged,
         this, &LLM::modelListChanged, Qt::QueuedConnection);
+    connect(m_currentChat, &Chat::recalcChanged,
+        this, &LLM::recalcChanged, Qt::QueuedConnection);
+    connect(m_currentChat, &Chat::responseChanged,
+        this, &LLM::responseChanged, Qt::QueuedConnection);
 }

 QList<QString> LLM::modelList() const
 {
     Q_ASSERT(m_currentChat);
     // Build a model list from exepath and from the localpath
     QList<QString> list;

@@ -107,3 +112,10 @@ bool LLM::checkForUpdates() const

     return QProcess::startDetached(fileName);
 }
+
+bool LLM::isRecalc() const
+{
+    Q_ASSERT(m_currentChat);
+    return m_currentChat->isRecalc();
+}
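The FIXME kept in the constructor says these connections eventually belong wherever a new chat object is created. Purely as an illustration, here is a hypothetical setCurrentChat() member, continuing the stand-in classes from the sketch above and not part of this commit, showing what that re-wiring might look like:

// Hypothetical only, not part of this commit: one way the constructor's FIXME
// could be resolved once the current chat becomes switchable. setCurrentChat()
// would need to be declared on the stand-in LLM class from the earlier sketch.
void LLM::setCurrentChat(Chat *chat)
{
    if (chat == m_currentChat)
        return;

    if (m_currentChat)
        m_currentChat->disconnect(this);      // drop the old chat's forwarding connections

    m_currentChat = chat;

    if (m_currentChat) {
        connect(m_currentChat, &Chat::recalcChanged,
                this, &LLM::recalcChanged, Qt::QueuedConnection);
        connect(m_currentChat, &Chat::responseChanged,
                this, &LLM::responseChanged, Qt::QueuedConnection);
    }

    emit recalcChanged();   // prompt QML to re-read LLM.isRecalc for the new chat
}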
llm.h (4 changed lines)

@@ -10,6 +10,7 @@ class LLM : public QObject
     Q_OBJECT
     Q_PROPERTY(QList<QString> modelList READ modelList NOTIFY modelListChanged)
     Q_PROPERTY(Chat *currentChat READ currentChat NOTIFY currentChatChanged)
+    Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)

 public:

@@ -18,10 +19,13 @@ public:
     QList<QString> modelList() const;
     Q_INVOKABLE bool checkForUpdates() const;
     Chat *currentChat() const { return m_currentChat; }
+    bool isRecalc() const;

 Q_SIGNALS:
     void modelListChanged();
     void currentChatChanged();
+    void recalcChanged();
+    void responseChanged();

 private:
     Chat *m_currentChat;
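The added Q_PROPERTY line is what makes LLM.isRecalc readable from QML, and its NOTIFY recalcChanged clause tells the QML engine when to re-evaluate bindings that use it; both are resolved through Qt's meta-object system. The small hypothetical helper below, written against the stand-in LLM class from the first sketch, makes that route visible:

#include <QDebug>
#include <QMetaProperty>

// Hypothetical helper, for illustration only: reads the property and its
// NOTIFY signal through the meta-object system, the same route QML uses.
void dumpIsRecalcProperty(const LLM &llm)
{
    const QMetaObject *mo = llm.metaObject();
    const QMetaProperty prop = mo->property(mo->indexOfProperty("isRecalc"));
    qDebug() << "isRecalc =" << prop.read(&llm).toBool()
             << ", notify signal =" << prop.notifySignal().name();
}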
main.qml (8 changed lines)

@@ -353,10 +353,9 @@ Window {
             text: qsTr("Recalculating context.")

             Connections {
-                // FIXME: This connection has to be setup everytime a new chat object is created
-                target: LLM.currentChat
+                target: LLM
                 function onRecalcChanged() {
-                    if (LLM.currentChat.isRecalc)
+                    if (LLM.isRecalc)
                         recalcPopup.open()
                     else
                         recalcPopup.close()

@@ -817,8 +816,7 @@ Window {
             property bool isAutoScrolling: false

             Connections {
-                // FIXME: This connection has to be setup everytime a new chat object is created
-                target: LLM.currentChat
+                target: LLM
                 function onResponseChanged() {
                     if (listView.shouldAutoScroll) {
                         listView.isAutoScrolling = true