mirror of
https://github.com/nomic-ai/gpt4all.git
synced 2025-09-05 10:30:29 +00:00
LocalDocs version 2 with text embeddings.
This commit is contained in:
@@ -325,6 +325,10 @@ Window {
     anchors.centerIn: parent
     width: Math.min(1280, window.width - (window.width * .1))
     height: window.height - (window.height * .1)
+    onDownloadClicked: {
+        downloadNewModels.showEmbeddingModels = true
+        downloadNewModels.open()
+    }
 }

 Button {
@@ -652,6 +656,7 @@ Window {
     width: Math.min(600, 0.3 * window.width)
     height: window.height - y
     onDownloadClicked: {
+        downloadNewModels.showEmbeddingModels = false
         downloadNewModels.open()
     }
     onAboutClicked: {
@@ -818,11 +823,11 @@ Window {
     color: theme.textAccent
     text: {
         switch (currentChat.responseState) {
-        case Chat.ResponseStopped: return "response stopped ...";
-        case Chat.LocalDocsRetrieval: return "retrieving " + currentChat.collectionList.join(", ") + " ...";
-        case Chat.LocalDocsProcessing: return "processing " + currentChat.collectionList.join(", ") + " ...";
-        case Chat.PromptProcessing: return "processing ..."
-        case Chat.ResponseGeneration: return "generating response ...";
+        case Chat.ResponseStopped: return qsTr("response stopped ...");
+        case Chat.LocalDocsRetrieval: return qsTr("retrieving localdocs: ") + currentChat.collectionList.join(", ") + " ...";
+        case Chat.LocalDocsProcessing: return qsTr("searching localdocs: ") + currentChat.collectionList.join(", ") + " ...";
+        case Chat.PromptProcessing: return qsTr("processing ...")
+        case Chat.ResponseGeneration: return qsTr("generating response ...");
         default: return ""; // handle unexpected values
         }
     }
|
Reference in New Issue
Block a user