Don't repeat the prompt in the response.

Author: Adam Treat
Date:   2023-04-09 01:11:52 -04:00
Parent: 0903da3afa
Commit: 02e13737f3
2 changed files with 9 additions and 5 deletions


@@ -80,7 +80,7 @@ Window {
     model: chatModel
     delegate: TextArea {
         text: currentResponse ? LLM.response : value
-        width: parent.width
+        width: listView.width
         color: "#d1d5db"
         wrapMode: Text.WordWrap
         focus: false
@@ -204,11 +204,12 @@ Window {
         listElement.currentResponse = false
         listElement.value = LLM.response
     }
+    var prompt = textInput.text + "\n"
     chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text})
-    chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": textInput.text})
+    chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt})
     LLM.resetResponse()
-    LLM.prompt(textInput.text)
+    LLM.prompt(prompt)
     textInput.text = ""
 }
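
For context on the intent (the response entry now carries the prompt it was generated from, with a trailing newline), here is a minimal, illustrative sketch of how such a stored prompt could be used to drop an echoed prompt from the raw model output. The QtObject wrapper and the stripEchoedPrompt helper are assumptions for illustration and are not part of this commit.

// Illustrative sketch only, not code from this commit: removes a leading
// copy of the prompt from a response that echoes it back verbatim.
import QtQuick 2.15

QtObject {
    id: responseCleaner

    // Return rawResponse with a leading occurrence of prompt stripped off.
    function stripEchoedPrompt(rawResponse, prompt) {
        if (prompt.length > 0 && rawResponse.startsWith(prompt))
            return rawResponse.substring(prompt.length)
        return rawResponse
    }
}

A delegate could then bind text: currentResponse ? responseCleaner.stripEchoedPrompt(LLM.response, prompt) : value, though whether the echo is actually filtered in QML or on the C++ side is not shown in this diff.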