From bb834ecde812d79abd42a36ea53427eab0bdeb04 Mon Sep 17 00:00:00 2001
From: Adam Treat
Date: Fri, 21 Apr 2023 09:56:06 -0400
Subject: [PATCH] Restore basic functionality.

---
 main.qml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/main.qml b/main.qml
index 5233f329..51f8ebdb 100644
--- a/main.qml
+++ b/main.qml
@@ -889,9 +889,9 @@ Window {
                 if (listElement.name === qsTr("Response: ")) {
                     listElement.currentResponse = true
                     listElement.value = LLM.response
-                    LLM.prompt(listElement.prompt, settingsDialog.promptTemplate, settingsDialog.maxLength,
-                        settingsDialog.topK, settingsDialog.topP, settingsDialog.temperature,
-                        settingsDialog.promptBatchSize)
+                    LLM.prompt(listElement.prompt, settings.promptTemplate, settings.maxLength,
+                        settings.topK, settings.topP, settings.temperature,
+                        settings.promptBatchSize)
                 }
             }
         }
@@ -961,8 +961,8 @@ Window {
                 chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text})
                 chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt})
                 LLM.resetResponse()
-                LLM.prompt(prompt, settingsDialog.promptTemplate, settingsDialog.maxLength, settingsDialog.topK,
-                    settingsDialog.topP, settingsDialog.temperature, settingsDialog.promptBatchSize)
+                LLM.prompt(prompt, settings.promptTemplate, settings.maxLength, settings.topK,
+                    settings.topP, settings.temperature, settings.promptBatchSize)
                 textInput.text = ""
             }
         }
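
Note: the patch changes the LLM.prompt() call sites so the generation parameters are read from a "settings" object rather than from the settings dialog ("settingsDialog.*" becomes "settings.*"). The following is a minimal, hypothetical sketch of what such an element could look like, assuming "settings" refers to a persisted Settings item from Qt.labs.settings; only the property names are taken from the patch, while the import, the Settings type, and all default values are assumptions for illustration.

// Hypothetical sketch, not taken from the patched repository.
// The property names mirror the arguments passed to LLM.prompt()
// in the patch; the defaults below are purely illustrative.
import Qt.labs.settings 1.0

Settings {
    id: settings
    property string promptTemplate: "%1"   // template applied to the user prompt
    property int maxLength: 4096           // maximum number of tokens to generate
    property int topK: 40                  // top-k sampling cutoff
    property real topP: 0.9                // nucleus sampling threshold
    property real temperature: 0.7         // sampling temperature
    property int promptBatchSize: 9        // tokens processed per prompt batch
}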