StartupDialog: fix two untranslated strings (#3293)

Signed-off-by: Jared Van Bortel <jared@nomic.ai>
This commit is contained in:
Jared Van Bortel 2024-12-13 15:19:40 -05:00 committed by GitHub
parent b7df4ebbcb
commit 03f7ca4409
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 444 additions and 373 deletions

View File

@@ -11,6 +11,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- Fix API server replying with incorrect token counts and stop reason after v3.5.0 ([#3256](https://github.com/nomic-ai/gpt4all/pull/3256))
- Fix API server remembering previous, unrelated conversations after v3.5.0 ([#3256](https://github.com/nomic-ai/gpt4all/pull/3256))
- Fix mishandling of default chat template and system message of cloned models in v3.5.0 ([#3262](https://github.com/nomic-ai/gpt4all/pull/3262))
- Fix untranslated text on the startup dialog ([#3293](https://github.com/nomic-ai/gpt4all/pull/3293))
## [3.5.1] - 2024-12-10

View File

@@ -115,7 +115,7 @@ model release that uses your data!")
anchors.right: parent.right
Label {
id: optInStatistics
text: "Opt-in to anonymous usage analytics used to improve GPT4All"
text: qsTr("Opt-in to anonymous usage analytics used to improve GPT4All")
Layout.row: 0
Layout.column: 0
color: theme.textColor
@@ -229,7 +229,7 @@ model release that uses your data!")
Label {
id: optInNetwork
text: "Opt-in to anonymous sharing of chats to the GPT4All Datalake"
text: qsTr("Opt-in to anonymous sharing of chats to the GPT4All Datalake")
Layout.row: 1
Layout.column: 0
color: theme.textColor

View File

@@ -726,7 +726,7 @@
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="212"/>
<location filename="../qml/ChatItemView.qml" line="610"/>
<location filename="../qml/ChatItemView.qml" line="617"/>
<source>Copy</source>
<translation type="unfinished"></translation>
</message>
@@ -805,17 +805,17 @@
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="632"/>
<location filename="../qml/ChatItemView.qml" line="639"/>
<source>Like response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="661"/>
<location filename="../qml/ChatItemView.qml" line="668"/>
<source>Dislike response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="724"/>
<location filename="../qml/ChatItemView.qml" line="731"/>
<source>Suggested follow-ups</source>
<translation type="unfinished"></translation>
</message>
@@ -823,7 +823,7 @@
<context>
<name>ChatLLM</name>
<message>
<location filename="../src/chatllm.cpp" line="925"/>
<location filename="../src/chatllm.cpp" line="935"/>
<source>Your message was too long and could not be processed (%1 &gt; %2). Please try again with something shorter.</source>
<translation type="unfinished"></translation>
</message>
@@ -954,68 +954,68 @@
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="752"/>
<location filename="../qml/ChatView.qml" line="753"/>
<source>Load the default model</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="753"/>
<location filename="../qml/ChatView.qml" line="754"/>
<source>Loads the default model which can be changed in settings</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="764"/>
<location filename="../qml/ChatView.qml" line="765"/>
<source>No Model Installed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="773"/>
<location filename="../qml/ChatView.qml" line="774"/>
<source>GPT4All requires that you install at least one
model to get started</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="785"/>
<location filename="../qml/ChatView.qml" line="786"/>
<source>Install a Model</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="790"/>
<location filename="../qml/ChatView.qml" line="791"/>
<source>Shows the add model view</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="815"/>
<location filename="../qml/ChatView.qml" line="816"/>
<source>Conversation with the model</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="816"/>
<location filename="../qml/ChatView.qml" line="817"/>
<source>prompt / response pairs from the conversation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1092"/>
<location filename="../qml/ChatView.qml" line="1093"/>
<source>Legacy prompt template needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1096"/>
<location filename="../qml/ChatView.qml" line="1097"/>
<source>No &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; configured.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1099"/>
<location filename="../qml/ChatView.qml" line="1100"/>
<source>The &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; cannot be blank.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1102"/>
<location filename="../qml/ChatView.qml" line="1103"/>
<source>Legacy system prompt needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1290"/>
<location filename="../qml/ChatView.qml" line="1291"/>
<source>Copy</source>
<translation type="unfinished"></translation>
</message>
@@ -1027,47 +1027,47 @@ model to get started</source>
</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="918"/>
<location filename="../qml/ChatView.qml" line="919"/>
<source>Erase and reset chat session</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="939"/>
<location filename="../qml/ChatView.qml" line="940"/>
<source>Copy chat session to clipboard</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1213"/>
<location filename="../qml/ChatView.qml" line="1214"/>
<source>Add media</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1214"/>
<location filename="../qml/ChatView.qml" line="1215"/>
<source>Adds media to the prompt</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1348"/>
<location filename="../qml/ChatView.qml" line="1349"/>
<source>Stop generating</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1349"/>
<location filename="../qml/ChatView.qml" line="1350"/>
<source>Stop the current response generation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1392"/>
<location filename="../qml/ChatView.qml" line="1393"/>
<source>Attach</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1394"/>
<location filename="../qml/ChatView.qml" line="1395"/>
<source>Single File</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1004"/>
<location filename="../qml/ChatView.qml" line="1005"/>
<source>Reloads the model</source>
<translation type="unfinished"></translation>
</message>
@@ -1089,7 +1089,7 @@ model to get started</source>
</message>
<message>
<location filename="../qml/ChatView.qml" line="391"/>
<location filename="../qml/ChatView.qml" line="1002"/>
<location filename="../qml/ChatView.qml" line="1003"/>
<source>Reload · %1</source>
<translation type="unfinished"></translation>
</message>
@@ -1099,47 +1099,47 @@ model to get started</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="728"/>
<location filename="../qml/ChatView.qml" line="729"/>
<source>Load · %1 (default) </source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Send a message...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Load a model to continue...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1243"/>
<location filename="../qml/ChatView.qml" line="1244"/>
<source>Send messages/prompts to the model</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1284"/>
<location filename="../qml/ChatView.qml" line="1285"/>
<source>Cut</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1296"/>
<location filename="../qml/ChatView.qml" line="1297"/>
<source>Paste</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1300"/>
<location filename="../qml/ChatView.qml" line="1301"/>
<source>Select All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1370"/>
<location filename="../qml/ChatView.qml" line="1371"/>
<source>Send message</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1371"/>
<location filename="../qml/ChatView.qml" line="1372"/>
<source>Sends the message/prompt contained in textfield to the model</source>
<translation type="unfinished"></translation>
</message>
@@ -1602,78 +1602,78 @@ model to get started</source>
<context>
<name>ModelList</name>
<message>
<location filename="../src/modellist.cpp" line="1632"/>
<location filename="../src/modellist.cpp" line="1672"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1651"/>
<location filename="../src/modellist.cpp" line="1691"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1679"/>
<location filename="../src/modellist.cpp" line="1720"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1710"/>
<location filename="../src/modellist.cpp" line="1752"/>
<source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1735"/>
<location filename="../src/modellist.cpp" line="1778"/>
<source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1761"/>
<location filename="../src/modellist.cpp" line="1805"/>
<source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1664"/>
<location filename="../src/modellist.cpp" line="1705"/>
<source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1270"/>
<location filename="../src/modellist.cpp" line="1321"/>
<location filename="../src/modellist.cpp" line="1309"/>
<location filename="../src/modellist.cpp" line="1360"/>
<source>cannot open &quot;%1&quot;: %2</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1282"/>
<location filename="../src/modellist.cpp" line="1321"/>
<source>cannot create &quot;%1&quot;: %2</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1332"/>
<location filename="../src/modellist.cpp" line="1371"/>
<source>%1 (%2)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1333"/>
<location filename="../src/modellist.cpp" line="1372"/>
<source>&lt;strong&gt;OpenAI-Compatible API Model&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;API Key: %1&lt;/li&gt;&lt;li&gt;Base URL: %2&lt;/li&gt;&lt;li&gt;Model Name: %3&lt;/li&gt;&lt;/ul&gt;</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1691"/>
<location filename="../src/modellist.cpp" line="1733"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1773"/>
<location filename="../src/modellist.cpp" line="1818"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal API key and the API base URL.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to the OpenAI-compatible API Server you specified!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with the OpenAI-compatible API Server&lt;/li&gt;</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1790"/>
<location filename="../src/modellist.cpp" line="1835"/>
<source>&lt;strong&gt;Connect to OpenAI-compatible API server&lt;/strong&gt;&lt;br&gt; %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="2213"/>
<location filename="../src/modellist.cpp" line="2259"/>
<source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
<translation type="unfinished"></translation>
</message>
@@ -2429,6 +2429,11 @@ model release that uses your data!</source>
<source>Describes what will happen when you opt-in</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="118"/>
<source>Opt-in to anonymous usage analytics used to improve GPT4All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="124"/>
<location filename="../qml/StartupDialog.qml" line="150"/>
@@ -2462,6 +2467,11 @@ model release that uses your data!</source>
<source>Allow opt-out for anonymous usage statistics</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="232"/>
<source>Opt-in to anonymous sharing of chats to the GPT4All Datalake</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="238"/>
<location filename="../qml/StartupDialog.qml" line="265"/>

View File

@@ -738,7 +738,7 @@
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="212"/>
<location filename="../qml/ChatItemView.qml" line="610"/>
<location filename="../qml/ChatItemView.qml" line="617"/>
<source>Copy</source>
<translation type="unfinished">Copiar</translation>
</message>
@@ -817,17 +817,17 @@
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="632"/>
<location filename="../qml/ChatItemView.qml" line="639"/>
<source>Like response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="661"/>
<location filename="../qml/ChatItemView.qml" line="668"/>
<source>Dislike response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="724"/>
<location filename="../qml/ChatItemView.qml" line="731"/>
<source>Suggested follow-ups</source>
<translation type="unfinished">Seguimientos sugeridos</translation>
</message>
@@ -835,7 +835,7 @@
<context>
<name>ChatLLM</name>
<message>
<location filename="../src/chatllm.cpp" line="925"/>
<location filename="../src/chatllm.cpp" line="935"/>
<source>Your message was too long and could not be processed (%1 &gt; %2). Please try again with something shorter.</source>
<translation type="unfinished"></translation>
</message>
@@ -974,37 +974,37 @@
<translation>agregar colecciones de documentos al chat</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="752"/>
<location filename="../qml/ChatView.qml" line="753"/>
<source>Load the default model</source>
<translation>Cargar el modelo predeterminado</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="753"/>
<location filename="../qml/ChatView.qml" line="754"/>
<source>Loads the default model which can be changed in settings</source>
<translation>Carga el modelo predeterminado que se puede cambiar en la configuración</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="764"/>
<location filename="../qml/ChatView.qml" line="765"/>
<source>No Model Installed</source>
<translation>No hay modelo instalado</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1092"/>
<location filename="../qml/ChatView.qml" line="1093"/>
<source>Legacy prompt template needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1096"/>
<location filename="../qml/ChatView.qml" line="1097"/>
<source>No &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; configured.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1099"/>
<location filename="../qml/ChatView.qml" line="1100"/>
<source>The &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; cannot be blank.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1102"/>
<location filename="../qml/ChatView.qml" line="1103"/>
<source>Legacy system prompt needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
@@ -1025,22 +1025,22 @@
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="785"/>
<location filename="../qml/ChatView.qml" line="786"/>
<source>Install a Model</source>
<translation>Instalar un modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="790"/>
<location filename="../qml/ChatView.qml" line="791"/>
<source>Shows the add model view</source>
<translation>Muestra la vista de agregar modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="815"/>
<location filename="../qml/ChatView.qml" line="816"/>
<source>Conversation with the model</source>
<translation>Conversación con el modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="816"/>
<location filename="../qml/ChatView.qml" line="817"/>
<source>prompt / response pairs from the conversation</source>
<translation>pares de pregunta / respuesta de la conversación</translation>
</message>
@@ -1069,7 +1069,7 @@
<translation type="vanished">generando preguntas ...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1290"/>
<location filename="../qml/ChatView.qml" line="1291"/>
<source>Copy</source>
<translation>Copiar</translation>
</message>
@@ -1106,12 +1106,12 @@
<translation type="vanished">Seguimientos sugeridos</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="918"/>
<location filename="../qml/ChatView.qml" line="919"/>
<source>Erase and reset chat session</source>
<translation>Borrar y reiniciar sesión de chat</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="939"/>
<location filename="../qml/ChatView.qml" line="940"/>
<source>Copy chat session to clipboard</source>
<translation>Copiar sesión de chat al portapapeles</translation>
</message>
@@ -1120,43 +1120,43 @@
<translation type="vanished">Rehacer última respuesta del chat</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1213"/>
<location filename="../qml/ChatView.qml" line="1214"/>
<source>Add media</source>
<translation>Agregar medios</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1214"/>
<location filename="../qml/ChatView.qml" line="1215"/>
<source>Adds media to the prompt</source>
<translation>Agrega medios al mensaje</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1348"/>
<location filename="../qml/ChatView.qml" line="1349"/>
<source>Stop generating</source>
<translation>Detener generación</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1349"/>
<location filename="../qml/ChatView.qml" line="1350"/>
<source>Stop the current response generation</source>
<translation>Detener la generación de la respuesta actual</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1392"/>
<location filename="../qml/ChatView.qml" line="1393"/>
<source>Attach</source>
<translation>Adjuntar</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1394"/>
<location filename="../qml/ChatView.qml" line="1395"/>
<source>Single File</source>
<translation>Fila india</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1004"/>
<location filename="../qml/ChatView.qml" line="1005"/>
<source>Reloads the model</source>
<translation>Recarga el modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="391"/>
<location filename="../qml/ChatView.qml" line="1002"/>
<location filename="../qml/ChatView.qml" line="1003"/>
<source>Reload · %1</source>
<translation>Recargar · %1</translation>
</message>
@@ -1166,7 +1166,7 @@
<translation>Cargando · %1</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="728"/>
<location filename="../qml/ChatView.qml" line="729"/>
<source>Load · %1 (default) </source>
<translation>Cargar · %1 (predeterminado) </translation>
</message>
@@ -1186,47 +1186,47 @@
</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Send a message...</source>
<translation>Enviar un mensaje...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Load a model to continue...</source>
<translation>Carga un modelo para continuar...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1243"/>
<location filename="../qml/ChatView.qml" line="1244"/>
<source>Send messages/prompts to the model</source>
<translation>Enviar mensajes/indicaciones al modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1284"/>
<location filename="../qml/ChatView.qml" line="1285"/>
<source>Cut</source>
<translation>Cortar</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1296"/>
<location filename="../qml/ChatView.qml" line="1297"/>
<source>Paste</source>
<translation>Pegar</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1300"/>
<location filename="../qml/ChatView.qml" line="1301"/>
<source>Select All</source>
<translation>Seleccionar todo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1370"/>
<location filename="../qml/ChatView.qml" line="1371"/>
<source>Send message</source>
<translation>Enviar mensaje</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1371"/>
<location filename="../qml/ChatView.qml" line="1372"/>
<source>Sends the message/prompt contained in textfield to the model</source>
<translation>Envía el mensaje/indicación contenido en el campo de texto al modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="773"/>
<location filename="../qml/ChatView.qml" line="774"/>
<source>GPT4All requires that you install at least one
model to get started</source>
<translation>GPT4All requiere que instale al menos un
@@ -1696,78 +1696,78 @@ modelo para comenzar
<context>
<name>ModelList</name>
<message>
<location filename="../src/modellist.cpp" line="1632"/>
<location filename="../src/modellist.cpp" line="1672"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Requiere clave API personal de OpenAI.&lt;/li&gt;&lt;li&gt;ADVERTENCIA: ¡Enviará sus chats a OpenAI!&lt;/li&gt;&lt;li&gt;Su clave API se almacenará en el disco&lt;/li&gt;&lt;li&gt;Solo se usará para comunicarse con OpenAI&lt;/li&gt;&lt;li&gt;Puede solicitar una clave API &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;aquí.&lt;/a&gt;&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1651"/>
<location filename="../src/modellist.cpp" line="1691"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelo ChatGPT GPT-3.5 Turbo de OpenAI&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1664"/>
<location filename="../src/modellist.cpp" line="1705"/>
<source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
<translation>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Aunque pagues a OpenAI por ChatGPT-4, esto no garantiza el acceso a la clave API. Contacta a OpenAI para más información.</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1679"/>
<location filename="../src/modellist.cpp" line="1720"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
<translation>&lt;strong&gt;Modelo ChatGPT GPT-4 de OpenAI&lt;/strong&gt;&lt;br&gt; %1 %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1691"/>
<location filename="../src/modellist.cpp" line="1733"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Requiere una clave API personal de Mistral.&lt;/li&gt;&lt;li&gt;ADVERTENCIA: ¡Enviará tus chats a Mistral!&lt;/li&gt;&lt;li&gt;Tu clave API se almacenará en el disco&lt;/li&gt;&lt;li&gt;Solo se usará para comunicarse con Mistral&lt;/li&gt;&lt;li&gt;Puedes solicitar una clave API &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;aquí&lt;/a&gt;.&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1710"/>
<location filename="../src/modellist.cpp" line="1752"/>
<source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelo Mistral Tiny&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1735"/>
<location filename="../src/modellist.cpp" line="1778"/>
<source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelo Mistral Small&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1761"/>
<location filename="../src/modellist.cpp" line="1805"/>
<source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelo Mistral Medium&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="2213"/>
<location filename="../src/modellist.cpp" line="2259"/>
<source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;Creado por %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Publicado el %2.&lt;li&gt;Este modelo tiene %3 me gusta.&lt;li&gt;Este modelo tiene %4 descargas.&lt;li&gt;Más información puede encontrarse &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;aquí.&lt;/a&gt;&lt;/ul&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1332"/>
<location filename="../src/modellist.cpp" line="1371"/>
<source>%1 (%2)</source>
<translation>%1 (%2)</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1270"/>
<location filename="../src/modellist.cpp" line="1321"/>
<location filename="../src/modellist.cpp" line="1309"/>
<location filename="../src/modellist.cpp" line="1360"/>
<source>cannot open &quot;%1&quot;: %2</source>
<translation>no se puede abrir &quot;%1&quot;: %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1282"/>
<location filename="../src/modellist.cpp" line="1321"/>
<source>cannot create &quot;%1&quot;: %2</source>
<translation>no se puede crear &quot;%1&quot;: %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1333"/>
<location filename="../src/modellist.cpp" line="1372"/>
<source>&lt;strong&gt;OpenAI-Compatible API Model&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;API Key: %1&lt;/li&gt;&lt;li&gt;Base URL: %2&lt;/li&gt;&lt;li&gt;Model Name: %3&lt;/li&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;Modelo de API compatible con OpenAI&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Clave API: %1&lt;/li&gt;&lt;li&gt;URL base: %2&lt;/li&gt;&lt;li&gt;Nombre del modelo: %3&lt;/li&gt;&lt;/ul&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1773"/>
<location filename="../src/modellist.cpp" line="1818"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal API key and the API base URL.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to the OpenAI-compatible API Server you specified!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with the OpenAI-compatible API Server&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Requiere una clave API personal y la URL base de la API.&lt;/li&gt;&lt;li&gt;ADVERTENCIA: ¡Enviará sus chats al servidor de API compatible con OpenAI que especificó!&lt;/li&gt;&lt;li&gt;Su clave API se almacenará en el disco&lt;/li&gt;&lt;li&gt;Solo se utilizará para comunicarse con el servidor de API compatible con OpenAI&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1790"/>
<location filename="../src/modellist.cpp" line="1835"/>
<source>&lt;strong&gt;Connect to OpenAI-compatible API server&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Conectar al servidor de API compatible con OpenAI&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
@ -2546,6 +2546,11 @@ NOTA: Al activar esta función, estarás enviando tus datos al Datalake de Códi
<source>Describes what will happen when you opt-in</source>
<translation>Describe lo que sucederá cuando acepte</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="118"/>
        <source>Opt-in to anonymous usage analytics used to improve GPT4All</source>
        <translation>Acepte las estadísticas de uso anónimas utilizadas para mejorar GPT4All</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="124"/>
<location filename="../qml/StartupDialog.qml" line="150"/>
@ -2579,6 +2584,11 @@ NOTA: Al activar esta función, estarás enviando tus datos al Datalake de Códi
<source>Allow opt-out for anonymous usage statistics</source>
<translation>Permitir rechazo de estadísticas de uso anónimas</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="232"/>
        <source>Opt-in to anonymous sharing of chats to the GPT4All Datalake</source>
        <translation>Acepte compartir de forma anónima los chats con el Datalake de GPT4All</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="238"/>
<location filename="../qml/StartupDialog.qml" line="265"/>

View File

@ -727,7 +727,7 @@
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="212"/>
<location filename="../qml/ChatItemView.qml" line="610"/>
<location filename="../qml/ChatItemView.qml" line="617"/>
<source>Copy</source>
<translation>Copia</translation>
</message>
@ -806,17 +806,17 @@
<translation>Ripeti</translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="632"/>
<location filename="../qml/ChatItemView.qml" line="639"/>
<source>Like response</source>
<translation>Mi piace la risposta</translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="661"/>
<location filename="../qml/ChatItemView.qml" line="668"/>
<source>Dislike response</source>
<translation>Non mi piace la risposta</translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="724"/>
<location filename="../qml/ChatItemView.qml" line="731"/>
<source>Suggested follow-ups</source>
<translation>Approfondimenti suggeriti</translation>
</message>
@ -824,7 +824,7 @@
<context>
<name>ChatLLM</name>
<message>
<location filename="../src/chatllm.cpp" line="925"/>
<location filename="../src/chatllm.cpp" line="935"/>
<source>Your message was too long and could not be processed (%1 &gt; %2). Please try again with something shorter.</source>
<translation>Il messaggio era troppo lungo e non è stato possibile elaborarlo (%1 &gt; %2). Riprova con un messaggio più breve.</translation>
</message>
@ -955,114 +955,114 @@
<translation>aggiungi raccolte di documenti alla chat</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="752"/>
<location filename="../qml/ChatView.qml" line="753"/>
<source>Load the default model</source>
<translation>Carica il modello predefinito</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="753"/>
<location filename="../qml/ChatView.qml" line="754"/>
<source>Loads the default model which can be changed in settings</source>
<translation>Carica il modello predefinito che può essere modificato nei settaggi</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="764"/>
<location filename="../qml/ChatView.qml" line="765"/>
<source>No Model Installed</source>
<translation>Nessun modello installato</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="773"/>
<location filename="../qml/ChatView.qml" line="774"/>
<source>GPT4All requires that you install at least one
model to get started</source>
<translation>GPT4All richiede l&apos;installazione di almeno un
modello per iniziare</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="785"/>
<location filename="../qml/ChatView.qml" line="786"/>
<source>Install a Model</source>
<translation>Installa un modello</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="790"/>
<location filename="../qml/ChatView.qml" line="791"/>
<source>Shows the add model view</source>
<translation>Mostra la vista aggiungi modello</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="815"/>
<location filename="../qml/ChatView.qml" line="816"/>
<source>Conversation with the model</source>
<translation>Conversazione con il modello</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="816"/>
<location filename="../qml/ChatView.qml" line="817"/>
<source>prompt / response pairs from the conversation</source>
<translation>coppie prompt/risposta dalla conversazione</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1092"/>
<location filename="../qml/ChatView.qml" line="1093"/>
<source>Legacy prompt template needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation>Il modello di prompt precedente deve essere &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;aggiornato&lt;/a&gt; nei Settaggi.</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1096"/>
<location filename="../qml/ChatView.qml" line="1097"/>
<source>No &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; configured.</source>
<translation>Nessun &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;modello di chat&lt;/a&gt; configurato.</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1099"/>
<location filename="../qml/ChatView.qml" line="1100"/>
<source>The &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; cannot be blank.</source>
<translation>Il &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;modello di chat&lt;/a&gt; non può essere vuoto.</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1102"/>
<location filename="../qml/ChatView.qml" line="1103"/>
<source>Legacy system prompt needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation>Il prompt del sistema precedente deve essere &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;aggiornato&lt;/a&gt; nei Settaggi.</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1290"/>
<location filename="../qml/ChatView.qml" line="1291"/>
<source>Copy</source>
<translation>Copia</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="918"/>
<location filename="../qml/ChatView.qml" line="919"/>
<source>Erase and reset chat session</source>
<translation>Cancella e ripristina la sessione di chat</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="939"/>
<location filename="../qml/ChatView.qml" line="940"/>
<source>Copy chat session to clipboard</source>
<translation>Copia la sessione di chat negli appunti</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1213"/>
<location filename="../qml/ChatView.qml" line="1214"/>
<source>Add media</source>
<translation>Aggiungi contenuti multimediali</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1214"/>
<location filename="../qml/ChatView.qml" line="1215"/>
<source>Adds media to the prompt</source>
<translation>Aggiunge contenuti multimediali al prompt</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1348"/>
<location filename="../qml/ChatView.qml" line="1349"/>
<source>Stop generating</source>
<translation>Interrompi la generazione</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1349"/>
<location filename="../qml/ChatView.qml" line="1350"/>
<source>Stop the current response generation</source>
<translation>Arresta la generazione della risposta corrente</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1392"/>
<location filename="../qml/ChatView.qml" line="1393"/>
<source>Attach</source>
<translation>Allegare</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1394"/>
<location filename="../qml/ChatView.qml" line="1395"/>
<source>Single File</source>
<translation>File singolo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1004"/>
<location filename="../qml/ChatView.qml" line="1005"/>
<source>Reloads the model</source>
<translation>Ricarica il modello</translation>
</message>
@ -1084,7 +1084,7 @@ modello per iniziare</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="391"/>
<location filename="../qml/ChatView.qml" line="1002"/>
<location filename="../qml/ChatView.qml" line="1003"/>
<source>Reload · %1</source>
<translation>Ricarica · %1</translation>
</message>
@ -1094,47 +1094,47 @@ modello per iniziare</translation>
<translation>Caricamento · %1</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="728"/>
<location filename="../qml/ChatView.qml" line="729"/>
<source>Load · %1 (default) </source>
<translation>Carica · %1 (predefinito) </translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Send a message...</source>
<translation>Manda un messaggio...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Load a model to continue...</source>
<translation>Carica un modello per continuare...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1243"/>
<location filename="../qml/ChatView.qml" line="1244"/>
<source>Send messages/prompts to the model</source>
<translation>Invia messaggi/prompt al modello</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1284"/>
<location filename="../qml/ChatView.qml" line="1285"/>
<source>Cut</source>
<translation>Taglia</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1296"/>
<location filename="../qml/ChatView.qml" line="1297"/>
<source>Paste</source>
<translation>Incolla</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1300"/>
<location filename="../qml/ChatView.qml" line="1301"/>
<source>Select All</source>
<translation>Seleziona tutto</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1370"/>
<location filename="../qml/ChatView.qml" line="1371"/>
<source>Send message</source>
<translation>Invia messaggio</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1371"/>
<location filename="../qml/ChatView.qml" line="1372"/>
<source>Sends the message/prompt contained in textfield to the model</source>
<translation>Invia il messaggio/prompt contenuto nel campo di testo al modello</translation>
</message>
@ -1598,78 +1598,78 @@ modello per iniziare</translation>
<context>
<name>ModelList</name>
<message>
<location filename="../src/modellist.cpp" line="1632"/>
<location filename="../src/modellist.cpp" line="1672"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Richiede una chiave API OpenAI personale.&lt;/li&gt;&lt;li&gt;ATTENZIONE: invierà le tue chat a OpenAI!&lt;/li&gt;&lt;li&gt;La tua chiave API verrà archiviata su disco&lt;/li&gt;&lt;li&gt; Verrà utilizzato solo per comunicare con OpenAI&lt;/li&gt;&lt;li&gt;Puoi richiedere una chiave API &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;qui.&lt;/a&gt; &lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1651"/>
<location filename="../src/modellist.cpp" line="1691"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
<translation></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1679"/>
<location filename="../src/modellist.cpp" line="1720"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
<translation></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1710"/>
<location filename="../src/modellist.cpp" line="1752"/>
<source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1735"/>
<location filename="../src/modellist.cpp" line="1778"/>
<source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1761"/>
<location filename="../src/modellist.cpp" line="1805"/>
<source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation></translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1664"/>
<location filename="../src/modellist.cpp" line="1705"/>
<source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
<translation>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Anche se paghi OpenAI per ChatGPT-4 questo non garantisce l&apos;accesso alla chiave API. Contatta OpenAI per maggiori informazioni.</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1270"/>
<location filename="../src/modellist.cpp" line="1321"/>
<location filename="../src/modellist.cpp" line="1309"/>
<location filename="../src/modellist.cpp" line="1360"/>
<source>cannot open &quot;%1&quot;: %2</source>
<translation>impossibile aprire &quot;%1&quot;: %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1282"/>
<location filename="../src/modellist.cpp" line="1321"/>
<source>cannot create &quot;%1&quot;: %2</source>
<translation>impossibile creare &quot;%1&quot;: %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1332"/>
<location filename="../src/modellist.cpp" line="1371"/>
<source>%1 (%2)</source>
<translation>%1 (%2)</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1333"/>
<location filename="../src/modellist.cpp" line="1372"/>
<source>&lt;strong&gt;OpenAI-Compatible API Model&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;API Key: %1&lt;/li&gt;&lt;li&gt;Base URL: %2&lt;/li&gt;&lt;li&gt;Model Name: %3&lt;/li&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;Modello API compatibile con OpenAI&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Chiave API: %1&lt;/li&gt;&lt;li&gt;URL di base: %2&lt;/li&gt;&lt;li&gt;Nome modello: %3&lt;/li&gt;&lt;/ul&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1691"/>
<location filename="../src/modellist.cpp" line="1733"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Richiede una chiave API Mistral personale.&lt;/li&gt;&lt;li&gt;ATTENZIONE: invierà le tue chat a Mistral!&lt;/li&gt;&lt;li&gt;La tua chiave API verrà archiviata su disco&lt;/li&gt;&lt;li&gt; Verrà utilizzato solo per comunicare con Mistral&lt;/li&gt;&lt;li&gt;Puoi richiedere una chiave API &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;qui&lt;/a&gt;. &lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1773"/>
<location filename="../src/modellist.cpp" line="1818"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal API key and the API base URL.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to the OpenAI-compatible API Server you specified!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with the OpenAI-compatible API Server&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Richiede una chiave API personale e l&apos;URL di base dell&apos;API.&lt;/li&gt;&lt;li&gt;ATTENZIONE: invierà le tue chat al server API compatibile con OpenAI che hai specificato!&lt;/li&gt;&lt;li&gt;La tua chiave API verrà archiviata su disco&lt;/li&gt;&lt;li&gt;Verrà utilizzata solo per comunicare con il server API compatibile con OpenAI&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1790"/>
<location filename="../src/modellist.cpp" line="1835"/>
<source>&lt;strong&gt;Connect to OpenAI-compatible API server&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Connetti al server API compatibile con OpenAI&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="2213"/>
<location filename="../src/modellist.cpp" line="2259"/>
<source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;Creato da %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Pubblicato il %2.&lt;li&gt;Questo modello ha %3 Mi piace.&lt;li&gt;Questo modello ha %4 download.&lt;li&gt;Altro informazioni possono essere trovate &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;qui.&lt;/a&gt;&lt;/ul&gt;</translation>
</message>
@ -2444,6 +2444,11 @@ NOTA: attivando questa funzione, invierai i tuoi dati al Datalake Open Source di
<source>Describes what will happen when you opt-in</source>
<translation>Descrive cosa accadrà quando effettuerai l&apos;adesione</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="118"/>
        <source>Opt-in to anonymous usage analytics used to improve GPT4All</source>
        <translation>Acconsenti alle statistiche di utilizzo anonime usate per migliorare GPT4All</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="124"/>
<location filename="../qml/StartupDialog.qml" line="150"/>
@ -2477,6 +2482,11 @@ NOTA: attivando questa funzione, invierai i tuoi dati al Datalake Open Source di
<source>Allow opt-out for anonymous usage statistics</source>
<translation>Consenti la disattivazione per le statistiche di utilizzo anonime</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="232"/>
        <source>Opt-in to anonymous sharing of chats to the GPT4All Datalake</source>
        <translation>Acconsenti alla condivisione anonima delle chat con il Datalake di GPT4All</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="238"/>
<location filename="../qml/StartupDialog.qml" line="265"/>

View File

@ -740,7 +740,7 @@
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="212"/>
<location filename="../qml/ChatItemView.qml" line="610"/>
<location filename="../qml/ChatItemView.qml" line="617"/>
<source>Copy</source>
<translation type="unfinished">Copiar</translation>
</message>
@ -819,17 +819,17 @@
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="632"/>
<location filename="../qml/ChatItemView.qml" line="639"/>
<source>Like response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="661"/>
<location filename="../qml/ChatItemView.qml" line="668"/>
<source>Dislike response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="724"/>
<location filename="../qml/ChatItemView.qml" line="731"/>
<source>Suggested follow-ups</source>
<translation type="unfinished">Perguntas relacionadas</translation>
</message>
@ -837,7 +837,7 @@
<context>
<name>ChatLLM</name>
<message>
<location filename="../src/chatllm.cpp" line="925"/>
<location filename="../src/chatllm.cpp" line="935"/>
<source>Your message was too long and could not be processed (%1 &gt; %2). Please try again with something shorter.</source>
<translation type="unfinished"></translation>
</message>
@ -976,64 +976,64 @@
<translation>Adicionar Coleção de Documentos</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="752"/>
<location filename="../qml/ChatView.qml" line="753"/>
<source>Load the default model</source>
<translation>Carregar o modelo padrão</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="753"/>
<location filename="../qml/ChatView.qml" line="754"/>
<source>Loads the default model which can be changed in settings</source>
<translation>Carrega o modelo padrão (personalizável nas configurações)</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="764"/>
<location filename="../qml/ChatView.qml" line="765"/>
<source>No Model Installed</source>
<translation>Nenhum Modelo Instalado</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="773"/>
<location filename="../qml/ChatView.qml" line="774"/>
<source>GPT4All requires that you install at least one
model to get started</source>
<translation>O GPT4All precisa de pelo menos um modelo
modelo instalado para funcionar</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="785"/>
<location filename="../qml/ChatView.qml" line="786"/>
<source>Install a Model</source>
<translation>Instalar um Modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="790"/>
<location filename="../qml/ChatView.qml" line="791"/>
<source>Shows the add model view</source>
<translation>Mostra a visualização para adicionar modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="815"/>
<location filename="../qml/ChatView.qml" line="816"/>
<source>Conversation with the model</source>
<translation>Conversa com o modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="816"/>
<location filename="../qml/ChatView.qml" line="817"/>
<source>prompt / response pairs from the conversation</source>
<translation>Pares de pergunta/resposta da conversa</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1092"/>
<location filename="../qml/ChatView.qml" line="1093"/>
<source>Legacy prompt template needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1096"/>
<location filename="../qml/ChatView.qml" line="1097"/>
<source>No &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; configured.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1099"/>
<location filename="../qml/ChatView.qml" line="1100"/>
<source>The &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; cannot be blank.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1102"/>
<location filename="../qml/ChatView.qml" line="1103"/>
<source>Legacy system prompt needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
@ -1062,7 +1062,7 @@ modelo instalado para funcionar</translation>
<translation type="vanished">gerando perguntas...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1290"/>
<location filename="../qml/ChatView.qml" line="1291"/>
<source>Copy</source>
<translation>Copiar</translation>
</message>
@ -1099,12 +1099,12 @@ modelo instalado para funcionar</translation>
<translation type="vanished">Perguntas relacionadas</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="918"/>
<location filename="../qml/ChatView.qml" line="919"/>
<source>Erase and reset chat session</source>
<translation>Apagar e redefinir sessão de chat</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="939"/>
<location filename="../qml/ChatView.qml" line="940"/>
<source>Copy chat session to clipboard</source>
<translation>Copiar histórico da conversa</translation>
</message>
@ -1113,37 +1113,37 @@ modelo instalado para funcionar</translation>
<translation type="vanished">Refazer última resposta</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1213"/>
<location filename="../qml/ChatView.qml" line="1214"/>
<source>Add media</source>
<translation>Adicionar mídia</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1214"/>
<location filename="../qml/ChatView.qml" line="1215"/>
<source>Adds media to the prompt</source>
<translation>Adiciona mídia ao prompt</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1348"/>
<location filename="../qml/ChatView.qml" line="1349"/>
<source>Stop generating</source>
<translation>Parar de gerar</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1349"/>
<location filename="../qml/ChatView.qml" line="1350"/>
<source>Stop the current response generation</source>
<translation>Parar a geração da resposta atual</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1392"/>
<location filename="../qml/ChatView.qml" line="1393"/>
<source>Attach</source>
<translation>Anexar</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1394"/>
<location filename="../qml/ChatView.qml" line="1395"/>
<source>Single File</source>
<translation>Arquivo Único</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1004"/>
<location filename="../qml/ChatView.qml" line="1005"/>
<source>Reloads the model</source>
<translation>Recarrega modelo</translation>
</message>
@ -1165,7 +1165,7 @@ modelo instalado para funcionar</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="391"/>
<location filename="../qml/ChatView.qml" line="1002"/>
<location filename="../qml/ChatView.qml" line="1003"/>
<source>Reload · %1</source>
<translation>Recarregar · %1</translation>
</message>
@ -1175,7 +1175,7 @@ modelo instalado para funcionar</translation>
<translation>Carregando · %1</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="728"/>
<location filename="../qml/ChatView.qml" line="729"/>
<source>Load · %1 (default) </source>
<translation>Carregar · %1 (padrão) </translation>
</message>
@ -1199,42 +1199,42 @@ modelo instalado para funcionar</translation>
</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Send a message...</source>
<translation>Enviar uma mensagem...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Load a model to continue...</source>
<translation>Carregue um modelo para continuar...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1243"/>
<location filename="../qml/ChatView.qml" line="1244"/>
<source>Send messages/prompts to the model</source>
<translation>Enviar mensagens/prompts para o modelo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1284"/>
<location filename="../qml/ChatView.qml" line="1285"/>
<source>Cut</source>
<translation>Recortar</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1296"/>
<location filename="../qml/ChatView.qml" line="1297"/>
<source>Paste</source>
<translation>Colar</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1300"/>
<location filename="../qml/ChatView.qml" line="1301"/>
<source>Select All</source>
<translation>Selecionar tudo</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1370"/>
<location filename="../qml/ChatView.qml" line="1371"/>
<source>Send message</source>
<translation>Enviar mensagem</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1371"/>
<location filename="../qml/ChatView.qml" line="1372"/>
<source>Sends the message/prompt contained in textfield to the model</source>
<translation>Envia a mensagem/prompt contida no campo de texto para o modelo</translation>
</message>
@ -1698,78 +1698,78 @@ modelo instalado para funcionar</translation>
<context>
<name>ModelList</name>
<message>
<location filename="../src/modellist.cpp" line="1632"/>
<location filename="../src/modellist.cpp" line="1672"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;É necessária uma chave de API da OpenAI.&lt;/li&gt;&lt;li&gt;AVISO: Seus chats serão enviados para a OpenAI!&lt;/li&gt;&lt;li&gt;Sua chave de API será armazenada localmente&lt;/li&gt;&lt;li&gt;Ela será usada apenas para comunicação com a OpenAI&lt;/li&gt;&lt;li&gt;Você pode solicitar uma chave de API &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;aqui.&lt;/a&gt;&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1651"/>
<location filename="../src/modellist.cpp" line="1691"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelo ChatGPT GPT-3.5 Turbo da OpenAI&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1679"/>
<location filename="../src/modellist.cpp" line="1720"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
<translation>&lt;strong&gt;Modelo ChatGPT GPT-4 da OpenAI&lt;/strong&gt;&lt;br&gt; %1 %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1710"/>
<location filename="../src/modellist.cpp" line="1752"/>
<source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelo Mistral Tiny&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1735"/>
<location filename="../src/modellist.cpp" line="1778"/>
<source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelo Mistral Small&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1761"/>
<location filename="../src/modellist.cpp" line="1805"/>
<source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelo Mistral Medium&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1664"/>
<location filename="../src/modellist.cpp" line="1705"/>
<source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
<translation>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Mesmo que você pague pelo ChatGPT-4 da OpenAI, isso não garante acesso à chave de API. Contate a OpenAI para mais informações.</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1270"/>
<location filename="../src/modellist.cpp" line="1321"/>
<location filename="../src/modellist.cpp" line="1309"/>
<location filename="../src/modellist.cpp" line="1360"/>
<source>cannot open &quot;%1&quot;: %2</source>
<translation>não é possível abrir &quot;%1&quot;: %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1282"/>
<location filename="../src/modellist.cpp" line="1321"/>
<source>cannot create &quot;%1&quot;: %2</source>
<translation>não é possível criar &quot;%1&quot;: %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1332"/>
<location filename="../src/modellist.cpp" line="1371"/>
<source>%1 (%2)</source>
<translation>%1 (%2)</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1333"/>
<location filename="../src/modellist.cpp" line="1372"/>
<source>&lt;strong&gt;OpenAI-Compatible API Model&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;API Key: %1&lt;/li&gt;&lt;li&gt;Base URL: %2&lt;/li&gt;&lt;li&gt;Model Name: %3&lt;/li&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;Modelo de API Compatível com OpenAI&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Chave da API: %1&lt;/li&gt;&lt;li&gt;URL Base: %2&lt;/li&gt;&lt;li&gt;Nome do Modelo: %3&lt;/li&gt;&lt;/ul&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1691"/>
<location filename="../src/modellist.cpp" line="1733"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;É necessária uma chave de API da Mistral.&lt;/li&gt;&lt;li&gt;AVISO: Seus chats serão enviados para a Mistral!&lt;/li&gt;&lt;li&gt;Sua chave de API será armazenada localmente&lt;/li&gt;&lt;li&gt;Ela será usada apenas para comunicação com a Mistral&lt;/li&gt;&lt;li&gt;Você pode solicitar uma chave de API &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;aqui&lt;/a&gt;.&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1773"/>
<location filename="../src/modellist.cpp" line="1818"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal API key and the API base URL.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to the OpenAI-compatible API Server you specified!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with the OpenAI-compatible API Server&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;É necessária uma chave de API e a URL da API.&lt;/li&gt;&lt;li&gt;AVISO: Seus chats serão enviados para o servidor de API compatível com OpenAI que você especificou!&lt;/li&gt;&lt;li&gt;Sua chave de API será armazenada no disco&lt;/li&gt;&lt;li&gt;Será usada apenas para comunicação com o servidor de API compatível com OpenAI&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1790"/>
<location filename="../src/modellist.cpp" line="1835"/>
<source>&lt;strong&gt;Connect to OpenAI-compatible API server&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Conectar a um servidor de API compatível com OpenAI&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="2213"/>
<location filename="../src/modellist.cpp" line="2259"/>
<source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;Criado por %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Publicado em %2.&lt;li&gt;Este modelo tem %3 curtidas.&lt;li&gt;Este modelo tem %4 downloads.&lt;li&gt;Mais informações podem ser encontradas &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;aqui.&lt;/a&gt;&lt;/ul&gt;</translation>
</message>
@ -2581,6 +2581,11 @@ versão do modelo GPT4All que utilize seus dados!</translation>
<source>Describes what will happen when you opt-in</source>
<translation>Descrição do que acontece ao participar</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="118"/>
<source>Opt-in to anonymous usage analytics used to improve GPT4All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="124"/>
<location filename="../qml/StartupDialog.qml" line="150"/>
@ -2614,6 +2619,11 @@ versão do modelo GPT4All que utilize seus dados!</translation>
<source>Allow opt-out for anonymous usage statistics</source>
<translation>Permitir recusar envio de estatísticas de uso anônimas</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="232"/>
<source>Opt-in to anonymous sharing of chats to the GPT4All Datalake</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="238"/>
<location filename="../qml/StartupDialog.qml" line="265"/>

View File

@ -743,7 +743,7 @@
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="212"/>
<location filename="../qml/ChatItemView.qml" line="610"/>
<location filename="../qml/ChatItemView.qml" line="617"/>
<source>Copy</source>
<translation>Copiere</translation>
</message>
@ -823,17 +823,17 @@
<translation>Refacere</translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="632"/>
<location filename="../qml/ChatItemView.qml" line="639"/>
<source>Like response</source>
<translation>Îmi Place răspunsul</translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="661"/>
<location filename="../qml/ChatItemView.qml" line="668"/>
<source>Dislike response</source>
<translation>NU Îmi Place răspunsul</translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="724"/>
<location filename="../qml/ChatItemView.qml" line="731"/>
<source>Suggested follow-ups</source>
<translation>Continuări sugerate</translation>
</message>
@ -841,7 +841,7 @@
<context>
<name>ChatLLM</name>
<message>
<location filename="../src/chatllm.cpp" line="925"/>
<location filename="../src/chatllm.cpp" line="935"/>
<source>Your message was too long and could not be processed (%1 &gt; %2). Please try again with something shorter.</source>
<translation>Mesajul tău e prea lung şi nu poate fi procesat (%1 &gt; %2). Încearcă iar cu un mesaj mai scurt.</translation>
</message>
@ -980,37 +980,37 @@
<translation>adaugă Colecţii de documente la conversaţie</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="752"/>
<location filename="../qml/ChatView.qml" line="753"/>
<source>Load the default model</source>
<translation>Încarcă modelul implicit</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="753"/>
<location filename="../qml/ChatView.qml" line="754"/>
<source>Loads the default model which can be changed in settings</source>
<translation>Încarcă modelul implicit care poate fi stabilit în Configurare</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="764"/>
<location filename="../qml/ChatView.qml" line="765"/>
<source>No Model Installed</source>
<translation>Niciun model instalat</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1092"/>
<location filename="../qml/ChatView.qml" line="1093"/>
<source>Legacy prompt template needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation>Vechiul Prompt-Template trebuie să fie &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;actualizat&lt;/a&gt; în Configurare.</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1096"/>
<location filename="../qml/ChatView.qml" line="1097"/>
<source>No &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; configured.</source>
<translation>Nu e configurat niciun &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;model de conversaţie&lt;/a&gt;.</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1099"/>
<location filename="../qml/ChatView.qml" line="1100"/>
<source>The &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; cannot be blank.</source>
<translation>&lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;Modelul de conversaţie&lt;/a&gt; nu poate lipsi.</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1102"/>
<location filename="../qml/ChatView.qml" line="1103"/>
<source>Legacy system prompt needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation>Vechiul System Prompt trebuie să fie &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;actualizat&lt;/a&gt; în Configurare.</translation>
</message>
@ -1045,28 +1045,28 @@
<translation>Schimbarea modelului va ŞTERGE conversaţia curenta.</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="773"/>
<location filename="../qml/ChatView.qml" line="774"/>
<source>GPT4All requires that you install at least one
model to get started</source>
<translation>GPT4All necesită cel puţin un model pentru a putea rula</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="785"/>
<location filename="../qml/ChatView.qml" line="786"/>
<source>Install a Model</source>
<translation>Instalează un model</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="790"/>
<location filename="../qml/ChatView.qml" line="791"/>
<source>Shows the add model view</source>
<translation>Afişează secţiunea de adăugare a unui model</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="815"/>
<location filename="../qml/ChatView.qml" line="816"/>
<source>Conversation with the model</source>
<translation>Conversaţie cu modelul</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="816"/>
<location filename="../qml/ChatView.qml" line="817"/>
<source>prompt / response pairs from the conversation</source>
<translation>perechi prompt/replică din conversaţie</translation>
</message>
@ -1095,7 +1095,7 @@ model to get started</source>
<translation type="vanished">se generează întrebări...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1290"/>
<location filename="../qml/ChatView.qml" line="1291"/>
<source>Copy</source>
<translation>Copiere</translation>
</message>
@ -1132,12 +1132,12 @@ model to get started</source>
<translation type="vanished">Continuări sugerate</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="918"/>
<location filename="../qml/ChatView.qml" line="919"/>
<source>Erase and reset chat session</source>
<translation>Şterge şi resetează sesiunea de chat</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="939"/>
<location filename="../qml/ChatView.qml" line="940"/>
<source>Copy chat session to clipboard</source>
<translation>Copiez sesiunea de chat (conversaţia) în Clipboard</translation>
</message>
@ -1146,37 +1146,37 @@ model to get started</source>
<translation type="vanished">Reface ultima replică</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1213"/>
<location filename="../qml/ChatView.qml" line="1214"/>
<source>Add media</source>
<translation>Adaugă media (un fişier)</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1214"/>
<location filename="../qml/ChatView.qml" line="1215"/>
<source>Adds media to the prompt</source>
<translation>Adaugă media (un fişier) la prompt</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1348"/>
<location filename="../qml/ChatView.qml" line="1349"/>
<source>Stop generating</source>
<translation>Opreşte generarea</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1349"/>
<location filename="../qml/ChatView.qml" line="1350"/>
<source>Stop the current response generation</source>
<translation>Opreşte generarea replicii curente</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1392"/>
<location filename="../qml/ChatView.qml" line="1393"/>
<source>Attach</source>
<translation>Ataşează</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1394"/>
<location filename="../qml/ChatView.qml" line="1395"/>
<source>Single File</source>
<translation>Un singur fişier</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1004"/>
<location filename="../qml/ChatView.qml" line="1005"/>
<source>Reloads the model</source>
<translation>Reîncarc modelul</translation>
</message>
@ -1212,7 +1212,7 @@ model to get started</source>
</message>
<message>
<location filename="../qml/ChatView.qml" line="391"/>
<location filename="../qml/ChatView.qml" line="1002"/>
<location filename="../qml/ChatView.qml" line="1003"/>
<source>Reload · %1</source>
<translation>Reîncărcare · %1</translation>
</message>
@ -1222,7 +1222,7 @@ model to get started</source>
<translation>Încărcare · %1</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="728"/>
<location filename="../qml/ChatView.qml" line="729"/>
<source>Load · %1 (default) </source>
<translation>Încarcă · %1 (implicit) </translation>
</message>
@ -1247,42 +1247,42 @@ model to get started</source>
</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Send a message...</source>
<translation>Trimite un mesaj...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Load a model to continue...</source>
<translation>Încarcă un model pentru a continua...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1243"/>
<location filename="../qml/ChatView.qml" line="1244"/>
<source>Send messages/prompts to the model</source>
<translation>Trimite mesaje/prompt-uri către model</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1284"/>
<location filename="../qml/ChatView.qml" line="1285"/>
<source>Cut</source>
<translation>Decupare (Cut)</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1296"/>
<location filename="../qml/ChatView.qml" line="1297"/>
<source>Paste</source>
<translation>Alipire (Paste)</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1300"/>
<location filename="../qml/ChatView.qml" line="1301"/>
<source>Select All</source>
<translation>Selectez tot</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1370"/>
<location filename="../qml/ChatView.qml" line="1371"/>
<source>Send message</source>
<translation>Trimit mesajul</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1371"/>
<location filename="../qml/ChatView.qml" line="1372"/>
<source>Sends the message/prompt contained in textfield to the model</source>
<translation>Trimite modelului mesajul/prompt-ul din câmpul-text</translation>
</message>
@ -1749,78 +1749,78 @@ model to get started</source>
<context>
<name>ModelList</name>
<message>
<location filename="../src/modellist.cpp" line="1270"/>
<location filename="../src/modellist.cpp" line="1321"/>
<location filename="../src/modellist.cpp" line="1309"/>
<location filename="../src/modellist.cpp" line="1360"/>
<source>cannot open &quot;%1&quot;: %2</source>
<translation>nu se poate deschide %1: %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1282"/>
<location filename="../src/modellist.cpp" line="1321"/>
<source>cannot create &quot;%1&quot;: %2</source>
<translation>nu se poate crea %1: %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1332"/>
<location filename="../src/modellist.cpp" line="1371"/>
<source>%1 (%2)</source>
<translation>%1 (%2)</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1333"/>
<location filename="../src/modellist.cpp" line="1372"/>
<source>&lt;strong&gt;OpenAI-Compatible API Model&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;API Key: %1&lt;/li&gt;&lt;li&gt;Base URL: %2&lt;/li&gt;&lt;li&gt;Model Name: %3&lt;/li&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;Model API compatibil cu OpenAI&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Cheia API: %1&lt;/li&gt;&lt;li&gt;Base URL: %2&lt;/li&gt;&lt;li&gt;Numele modelului: %3&lt;/li&gt;&lt;/ul&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1632"/>
<location filename="../src/modellist.cpp" line="1672"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Necesită o cheie API OpenAI personală. &lt;/li&gt;&lt;li&gt;ATENŢIE: Conversaţiile tale vor fi trimise la OpenAI!&lt;/li&gt;&lt;li&gt;Cheia ta API va fi stocată pe disc (local) &lt;/li&gt;&lt;li&gt;Va fi utilizată numai pentru comunicarea cu OpenAI&lt;/li&gt;&lt;li&gt;Poţi solicita o cheie API aici: &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;aici.&lt;/a&gt;&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1651"/>
<location filename="../src/modellist.cpp" line="1691"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelul OpenAI&apos;s ChatGPT GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1664"/>
<location filename="../src/modellist.cpp" line="1705"/>
<source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
<translation>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Chiar dacă plăteşti la OpenAI pentru ChatGPT-4, aceasta nu garantează accesul la cheia API. Contactează OpenAI pentru mai multe informaţii.</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1679"/>
<location filename="../src/modellist.cpp" line="1720"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
<translation>&lt;strong&gt;Modelul ChatGPT GPT-4 al OpenAI&lt;/strong&gt;&lt;br&gt; %1 %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1691"/>
<location filename="../src/modellist.cpp" line="1733"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Necesită cheia personală Mistral API. &lt;/li&gt;&lt;li&gt;ATENŢIE: Conversaţiile tale vor fi trimise la Mistral!&lt;/li&gt;&lt;li&gt;Cheia ta API va fi stocată pe disc (local)&lt;/li&gt;&lt;li&gt;Va fi utilizată numai pentru comunicarea cu Mistral&lt;/li&gt;&lt;li&gt;Poţi solicita o cheie API aici: &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;aici&lt;/a&gt;.&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1710"/>
<location filename="../src/modellist.cpp" line="1752"/>
<source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelul Mistral Tiny&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1735"/>
<location filename="../src/modellist.cpp" line="1778"/>
<source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelul Mistral Small&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1761"/>
<location filename="../src/modellist.cpp" line="1805"/>
<source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Modelul Mistral Medium&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1773"/>
<location filename="../src/modellist.cpp" line="1818"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal API key and the API base URL.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to the OpenAI-compatible API Server you specified!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with the OpenAI-compatible API Server&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Necesită cheia personală API si base-URL a API.&lt;/li&gt;&lt;li&gt;ATENŢIE: Conversaţiile tale vor fi trimise la serverul API compatibil cu OpenAI specificat!&lt;/li&gt;&lt;li&gt;Cheia ta API va fi stocată pe disc (local)&lt;/li&gt;&lt;li&gt;Va fi utilizată numai pentru comunicarea cu serverul API compatibil cu OpenAI&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1790"/>
<location filename="../src/modellist.cpp" line="1835"/>
<source>&lt;strong&gt;Connect to OpenAI-compatible API server&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Conectare la un server API compatibil cu OpenAI&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="2213"/>
<location filename="../src/modellist.cpp" line="2259"/>
<source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;Creat de către %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Publicat in: %2.&lt;li&gt;Acest model are %3 Likes.&lt;li&gt;Acest model are %4 download-uri.&lt;li&gt;Mai multe informaţii pot fi găsite la: &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;aici.&lt;/a&gt;&lt;/ul&gt;</translation>
</message>
@ -2675,6 +2675,11 @@ care foloseşte datele tale!</translation>
<source>Describes what will happen when you opt-in</source>
<translation>Descrie ce se întâmplă când participi</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="118"/>
<source>Opt-in to anonymous usage analytics used to improve GPT4All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="124"/>
<location filename="../qml/StartupDialog.qml" line="150"/>
@ -2708,6 +2713,11 @@ care foloseşte datele tale!</translation>
<source>Allow opt-out for anonymous usage statistics</source>
<translation>Permite anularea participării la colectarea de statistici despre utilizare -anonimă-</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="232"/>
<source>Opt-in to anonymous sharing of chats to the GPT4All Datalake</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="238"/>
<location filename="../qml/StartupDialog.qml" line="265"/>

View File

@ -742,7 +742,7 @@
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="212"/>
<location filename="../qml/ChatItemView.qml" line="610"/>
<location filename="../qml/ChatItemView.qml" line="617"/>
<source>Copy</source>
<translation type="unfinished"></translation>
</message>
@ -820,17 +820,17 @@
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="632"/>
<location filename="../qml/ChatItemView.qml" line="639"/>
<source>Like response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="661"/>
<location filename="../qml/ChatItemView.qml" line="668"/>
<source>Dislike response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="724"/>
<location filename="../qml/ChatItemView.qml" line="731"/>
<source>Suggested follow-ups</source>
<translation type="unfinished"></translation>
</message>
@ -838,7 +838,7 @@
<context>
<name>ChatLLM</name>
<message>
<location filename="../src/chatllm.cpp" line="925"/>
<location filename="../src/chatllm.cpp" line="935"/>
<source>Your message was too long and could not be processed (%1 &gt; %2). Please try again with something shorter.</source>
<translation type="unfinished"></translation>
</message>
@ -977,63 +977,63 @@
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="752"/>
<location filename="../qml/ChatView.qml" line="753"/>
<source>Load the default model</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="753"/>
<location filename="../qml/ChatView.qml" line="754"/>
<source>Loads the default model which can be changed in settings</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="764"/>
<location filename="../qml/ChatView.qml" line="765"/>
<source>No Model Installed</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="773"/>
<location filename="../qml/ChatView.qml" line="774"/>
<source>GPT4All requires that you install at least one
model to get started</source>
<translation>GPT4All要求您至少安装一个模型才能开始</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="785"/>
<location filename="../qml/ChatView.qml" line="786"/>
<source>Install a Model</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="790"/>
<location filename="../qml/ChatView.qml" line="791"/>
<source>Shows the add model view</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="815"/>
<location filename="../qml/ChatView.qml" line="816"/>
<source>Conversation with the model</source>
<translation>使</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="816"/>
<location filename="../qml/ChatView.qml" line="817"/>
<source>prompt / response pairs from the conversation</source>
<translation>/</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1092"/>
<location filename="../qml/ChatView.qml" line="1093"/>
<source>Legacy prompt template needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1096"/>
<location filename="../qml/ChatView.qml" line="1097"/>
<source>No &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; configured.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1099"/>
<location filename="../qml/ChatView.qml" line="1100"/>
<source>The &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; cannot be blank.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1102"/>
<location filename="../qml/ChatView.qml" line="1103"/>
<source>Legacy system prompt needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
@ -1062,7 +1062,7 @@ model to get started</source>
<translation type="vanished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1290"/>
<location filename="../qml/ChatView.qml" line="1291"/>
<source>Copy</source>
<translation></translation>
</message>
@ -1099,12 +1099,12 @@ model to get started</source>
<translation type="vanished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="918"/>
<location filename="../qml/ChatView.qml" line="919"/>
<source>Erase and reset chat session</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="939"/>
<location filename="../qml/ChatView.qml" line="940"/>
<source>Copy chat session to clipboard</source>
<translation></translation>
</message>
@ -1113,37 +1113,37 @@ model to get started</source>
<translation type="vanished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1213"/>
<location filename="../qml/ChatView.qml" line="1214"/>
<source>Add media</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1214"/>
<location filename="../qml/ChatView.qml" line="1215"/>
<source>Adds media to the prompt</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1348"/>
<location filename="../qml/ChatView.qml" line="1349"/>
<source>Stop generating</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1349"/>
<location filename="../qml/ChatView.qml" line="1350"/>
<source>Stop the current response generation</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1392"/>
<location filename="../qml/ChatView.qml" line="1393"/>
<source>Attach</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1394"/>
<location filename="../qml/ChatView.qml" line="1395"/>
<source>Single File</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1004"/>
<location filename="../qml/ChatView.qml" line="1005"/>
<source>Reloads the model</source>
<translation></translation>
</message>
@ -1165,7 +1165,7 @@ model to get started</source>
</message>
<message>
<location filename="../qml/ChatView.qml" line="391"/>
<location filename="../qml/ChatView.qml" line="1002"/>
<location filename="../qml/ChatView.qml" line="1003"/>
<source>Reload · %1</source>
<translation> · %1</translation>
</message>
@ -1175,7 +1175,7 @@ model to get started</source>
<translation> · %1</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="728"/>
<location filename="../qml/ChatView.qml" line="729"/>
<source>Load · %1 (default) </source>
<translation> · %1 () </translation>
</message>
@ -1198,42 +1198,42 @@ model to get started</source>
</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Send a message...</source>
<translation>...</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Load a model to continue...</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1243"/>
<location filename="../qml/ChatView.qml" line="1244"/>
<source>Send messages/prompts to the model</source>
<translation>/</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1284"/>
<location filename="../qml/ChatView.qml" line="1285"/>
<source>Cut</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1296"/>
<location filename="../qml/ChatView.qml" line="1297"/>
<source>Paste</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1300"/>
<location filename="../qml/ChatView.qml" line="1301"/>
<source>Select All</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1370"/>
<location filename="../qml/ChatView.qml" line="1371"/>
<source>Send message</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1371"/>
<location filename="../qml/ChatView.qml" line="1372"/>
<source>Sends the message/prompt contained in textfield to the model</source>
<translation>/</translation>
</message>
@ -1698,78 +1698,78 @@ model to get started</source>
<context>
<name>ModelList</name>
<message>
<location filename="../src/modellist.cpp" line="1270"/>
<location filename="../src/modellist.cpp" line="1321"/>
<location filename="../src/modellist.cpp" line="1309"/>
<location filename="../src/modellist.cpp" line="1360"/>
<source>cannot open &quot;%1&quot;: %2</source>
<translation>%1%2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1282"/>
<location filename="../src/modellist.cpp" line="1321"/>
<source>cannot create &quot;%1&quot;: %2</source>
<translation>%1%2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1332"/>
<location filename="../src/modellist.cpp" line="1371"/>
<source>%1 (%2)</source>
<translation>%1 (%2)</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1333"/>
<location filename="../src/modellist.cpp" line="1372"/>
<source>&lt;strong&gt;OpenAI-Compatible API Model&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;API Key: %1&lt;/li&gt;&lt;li&gt;Base URL: %2&lt;/li&gt;&lt;li&gt;Model Name: %3&lt;/li&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt; OpenAI API &lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;API %1&lt;/li&gt;&lt;li&gt; URL%2&lt;/li&gt;&lt;li&gt;%3&lt;/li&gt;&lt;/ul&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1632"/>
<location filename="../src/modellist.cpp" line="1672"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt; OpenAI API &lt;/li&gt;&lt;li&gt; OpenAI&lt;/li&gt;&lt;li&gt; API &lt;/li&gt;&lt;li&gt; OpenAI &lt;/li&gt;&lt;li&gt;&lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt; API &lt;/a&gt;&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1651"/>
<location filename="../src/modellist.cpp" line="1691"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1679"/>
<location filename="../src/modellist.cpp" line="1720"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
<translation>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1710"/>
<location filename="../src/modellist.cpp" line="1752"/>
<source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1735"/>
<location filename="../src/modellist.cpp" line="1778"/>
<source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1761"/>
<location filename="../src/modellist.cpp" line="1805"/>
<source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1773"/>
<location filename="../src/modellist.cpp" line="1818"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal API key and the API base URL.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to the OpenAI-compatible API Server you specified!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with the OpenAI-compatible API Server&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt; API API URL&lt;/li&gt;&lt;li&gt; OpenAI API &lt;/li&gt;&lt;li&gt; API &lt;/li&gt;&lt;li&gt; OpenAI API &lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1790"/>
<location filename="../src/modellist.cpp" line="1835"/>
<source>&lt;strong&gt;Connect to OpenAI-compatible API server&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt; OpenAI API &lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1664"/>
<location filename="../src/modellist.cpp" line="1705"/>
<source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
<translation>&lt;br&gt;&lt;br&gt;&lt;i&gt;* 使ChatGPT-4OpenAI付款API密钥访问OpenAI获取更多信息</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1691"/>
<location filename="../src/modellist.cpp" line="1733"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="2213"/>
<location filename="../src/modellist.cpp" line="2259"/>
<source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</translation>
</message>
@ -2573,6 +2573,11 @@ model release that uses your data!</source>
<source>Describes what will happen when you opt-in</source>
<translation></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="118"/>
<source>Opt-in to anonymous usage analytics used to improve GPT4All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="124"/>
<location filename="../qml/StartupDialog.qml" line="150"/>
@ -2606,6 +2611,11 @@ model release that uses your data!</source>
<source>Allow opt-out for anonymous usage statistics</source>
<translation>退使</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="232"/>
<source>Opt-in to anonymous sharing of chats to the GPT4All Datalake</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="238"/>
<location filename="../qml/StartupDialog.qml" line="265"/>

View File

@ -735,7 +735,7 @@
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="212"/>
<location filename="../qml/ChatItemView.qml" line="610"/>
<location filename="../qml/ChatItemView.qml" line="617"/>
<source>Copy</source>
<translation type="unfinished"></translation>
</message>
@ -813,17 +813,17 @@
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="632"/>
<location filename="../qml/ChatItemView.qml" line="639"/>
<source>Like response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="661"/>
<location filename="../qml/ChatItemView.qml" line="668"/>
<source>Dislike response</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatItemView.qml" line="724"/>
<location filename="../qml/ChatItemView.qml" line="731"/>
<source>Suggested follow-ups</source>
<translation type="unfinished"></translation>
</message>
@ -831,7 +831,7 @@
<context>
<name>ChatLLM</name>
<message>
<location filename="../src/chatllm.cpp" line="925"/>
<location filename="../src/chatllm.cpp" line="935"/>
<source>Your message was too long and could not be processed (%1 &gt; %2). Please try again with something shorter.</source>
<translation type="unfinished"></translation>
</message>
@ -951,7 +951,7 @@
</message>
<message>
<location filename="../qml/ChatView.qml" line="391"/>
<location filename="../qml/ChatView.qml" line="1002"/>
<location filename="../qml/ChatView.qml" line="1003"/>
<source>Reload · %1</source>
<translation> · %1</translation>
</message>
@ -961,27 +961,27 @@
<translation> · %1</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="728"/>
<location filename="../qml/ChatView.qml" line="729"/>
<source>Load · %1 (default) </source>
<translation> · %1 () </translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1092"/>
<location filename="../qml/ChatView.qml" line="1093"/>
<source>Legacy prompt template needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1096"/>
<location filename="../qml/ChatView.qml" line="1097"/>
<source>No &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; configured.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1099"/>
<location filename="../qml/ChatView.qml" line="1100"/>
<source>The &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;chat template&lt;/a&gt; cannot be blank.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1102"/>
<location filename="../qml/ChatView.qml" line="1103"/>
<source>Legacy system prompt needs to be &lt;a href=&quot;https://docs.gpt4all.io/gpt4all_desktop/chat_templates.html&quot;&gt;updated&lt;/a&gt; in Settings.</source>
<translation type="unfinished"></translation>
</message>
@ -1017,44 +1017,44 @@
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="752"/>
<location filename="../qml/ChatView.qml" line="753"/>
<source>Load the default model</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="753"/>
<location filename="../qml/ChatView.qml" line="754"/>
<source>Loads the default model which can be changed in settings</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="764"/>
<location filename="../qml/ChatView.qml" line="765"/>
<source>No Model Installed</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="773"/>
<location filename="../qml/ChatView.qml" line="774"/>
<source>GPT4All requires that you install at least one
model to get started</source>
<translation>GPT4All
</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="785"/>
<location filename="../qml/ChatView.qml" line="786"/>
<source>Install a Model</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="790"/>
<location filename="../qml/ChatView.qml" line="791"/>
<source>Shows the add model view</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="815"/>
<location filename="../qml/ChatView.qml" line="816"/>
<source>Conversation with the model</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="816"/>
<location filename="../qml/ChatView.qml" line="817"/>
<source>prompt / response pairs from the conversation</source>
<translation> / </translation>
</message>
@ -1091,7 +1091,7 @@ model to get started</source>
<translation type="vanished">......</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1290"/>
<location filename="../qml/ChatView.qml" line="1291"/>
<source>Copy</source>
<translation></translation>
</message>
@ -1128,12 +1128,12 @@ model to get started</source>
<translation type="vanished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="918"/>
<location filename="../qml/ChatView.qml" line="919"/>
<source>Erase and reset chat session</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="939"/>
<location filename="../qml/ChatView.qml" line="940"/>
<source>Copy chat session to clipboard</source>
<translation>簿</translation>
</message>
@ -1142,37 +1142,37 @@ model to get started</source>
<translation type="vanished"></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1213"/>
<location filename="../qml/ChatView.qml" line="1214"/>
<source>Add media</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1214"/>
<location filename="../qml/ChatView.qml" line="1215"/>
<source>Adds media to the prompt</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1348"/>
<location filename="../qml/ChatView.qml" line="1349"/>
<source>Stop generating</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1349"/>
<location filename="../qml/ChatView.qml" line="1350"/>
<source>Stop the current response generation</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1392"/>
<location filename="../qml/ChatView.qml" line="1393"/>
<source>Attach</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1394"/>
<location filename="../qml/ChatView.qml" line="1395"/>
<source>Single File</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1004"/>
<location filename="../qml/ChatView.qml" line="1005"/>
<source>Reloads the model</source>
<translation></translation>
</message>
@ -1192,42 +1192,42 @@ model to get started</source>
</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Send a message...</source>
<translation>......</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1240"/>
<location filename="../qml/ChatView.qml" line="1241"/>
<source>Load a model to continue...</source>
<translation>......</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1243"/>
<location filename="../qml/ChatView.qml" line="1244"/>
<source>Send messages/prompts to the model</source>
<translation>/</translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1284"/>
<location filename="../qml/ChatView.qml" line="1285"/>
<source>Cut</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1296"/>
<location filename="../qml/ChatView.qml" line="1297"/>
<source>Paste</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1300"/>
<location filename="../qml/ChatView.qml" line="1301"/>
<source>Select All</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1370"/>
<location filename="../qml/ChatView.qml" line="1371"/>
<source>Send message</source>
<translation></translation>
</message>
<message>
<location filename="../qml/ChatView.qml" line="1371"/>
<location filename="../qml/ChatView.qml" line="1372"/>
<source>Sends the message/prompt contained in textfield to the model</source>
<translation>/</translation>
</message>
@ -1686,78 +1686,78 @@ model to get started</source>
<context>
<name>ModelList</name>
<message>
<location filename="../src/modellist.cpp" line="1270"/>
<location filename="../src/modellist.cpp" line="1321"/>
<location filename="../src/modellist.cpp" line="1309"/>
<location filename="../src/modellist.cpp" line="1360"/>
<source>cannot open &quot;%1&quot;: %2</source>
<translation>%1%2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1282"/>
<location filename="../src/modellist.cpp" line="1321"/>
<source>cannot create &quot;%1&quot;: %2</source>
<translation>%1%2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1332"/>
<location filename="../src/modellist.cpp" line="1371"/>
<source>%1 (%2)</source>
<translation>%1%2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1333"/>
<location filename="../src/modellist.cpp" line="1372"/>
<source>&lt;strong&gt;OpenAI-Compatible API Model&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;API Key: %1&lt;/li&gt;&lt;li&gt;Base URL: %2&lt;/li&gt;&lt;li&gt;Model Name: %3&lt;/li&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;OpenAI API &lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;API %1&lt;/li&gt;&lt;li&gt; URL%2&lt;/li&gt;&lt;li&gt;%3&lt;/li&gt;&lt;/ul&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1632"/>
<location filename="../src/modellist.cpp" line="1672"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal OpenAI API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to OpenAI!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with OpenAI&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;here.&lt;/a&gt;&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt; OpenAI API &lt;/li&gt;&lt;li&gt; OpenAI&lt;/li&gt;&lt;li&gt; API &lt;/li&gt;&lt;li&gt; OpenAI &lt;/li&gt;&lt;li&gt;&lt;a href=&quot;https://platform.openai.com/account/api-keys&quot;&gt;&lt;/a&gt; API &lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1651"/>
<location filename="../src/modellist.cpp" line="1691"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;OpenAI ChatGPT GPT-3.5 Turbo&lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1664"/>
<location filename="../src/modellist.cpp" line="1705"/>
<source>&lt;br&gt;&lt;br&gt;&lt;i&gt;* Even if you pay OpenAI for ChatGPT-4 this does not guarantee API key access. Contact OpenAI for more info.</source>
<translation>&lt;br&gt;&lt;br&gt;&lt;i&gt;* 使 OpenAI ChatGPT GPT-4 使 API 使 OpenAI </translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1679"/>
<location filename="../src/modellist.cpp" line="1720"/>
<source>&lt;strong&gt;OpenAI&apos;s ChatGPT model GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</source>
<translation>&lt;strong&gt;OpenAI ChatGPT GPT-4&lt;/strong&gt;&lt;br&gt; %1 %2</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1691"/>
<location filename="../src/modellist.cpp" line="1733"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal Mistral API key.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to Mistral!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with Mistral&lt;/li&gt;&lt;li&gt;You can apply for an API key &lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;here&lt;/a&gt;.&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt; Mistral API &lt;/li&gt;&lt;li&gt; Mistral&lt;/li&gt;&lt;li&gt; API &lt;/li&gt;&lt;li&gt; Mistral &lt;/li&gt;&lt;li&gt;&lt;a href=&quot;https://console.mistral.ai/user/api-keys&quot;&gt;&lt;/a&gt; API &lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1710"/>
<location filename="../src/modellist.cpp" line="1752"/>
<source>&lt;strong&gt;Mistral Tiny model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Mistral &lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1735"/>
<location filename="../src/modellist.cpp" line="1778"/>
<source>&lt;strong&gt;Mistral Small model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Mistral &lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1761"/>
<location filename="../src/modellist.cpp" line="1805"/>
<source>&lt;strong&gt;Mistral Medium model&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt;Mistral &lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1773"/>
<location filename="../src/modellist.cpp" line="1818"/>
<source>&lt;ul&gt;&lt;li&gt;Requires personal API key and the API base URL.&lt;/li&gt;&lt;li&gt;WARNING: Will send your chats to the OpenAI-compatible API Server you specified!&lt;/li&gt;&lt;li&gt;Your API key will be stored on disk&lt;/li&gt;&lt;li&gt;Will only be used to communicate with the OpenAI-compatible API Server&lt;/li&gt;</source>
<translation>&lt;ul&gt;&lt;li&gt; API API URLBase URL&lt;/li&gt;&lt;li&gt; OpenAI API &lt;/li&gt;&lt;li&gt; API &lt;/li&gt;&lt;li&gt; OpenAI API &lt;/li&gt;</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="1790"/>
<location filename="../src/modellist.cpp" line="1835"/>
<source>&lt;strong&gt;Connect to OpenAI-compatible API server&lt;/strong&gt;&lt;br&gt; %1</source>
<translation>&lt;strong&gt; OpenAI API &lt;/strong&gt;&lt;br&gt; %1</translation>
</message>
<message>
<location filename="../src/modellist.cpp" line="2213"/>
<location filename="../src/modellist.cpp" line="2259"/>
<source>&lt;strong&gt;Created by %1.&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Published on %2.&lt;li&gt;This model has %3 likes.&lt;li&gt;This model has %4 downloads.&lt;li&gt;More info can be found &lt;a href=&quot;https://huggingface.co/%5&quot;&gt;here.&lt;/a&gt;&lt;/ul&gt;</source>
<translation>&lt;strong&gt;%1&lt;/strong&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;發佈日期:%2&lt;li&gt;累積讚數:%3 個讚&lt;li&gt;下載次數:%4 次&lt;li&gt;更多資訊請查閱&lt;a href=&quot;https://huggingface.co/%5&quot;&gt;&lt;/a&gt;&lt;/ul&gt;</translation>
</message>
@ -2553,6 +2553,11 @@ Nomic AI 將保留附加在您的資料上的所有署名訊息,並且您將
<source>Describes what will happen when you opt-in</source>
<translation></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="118"/>
<source>Opt-in to anonymous usage analytics used to improve GPT4All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="147"/>
<location filename="../qml/StartupDialog.qml" line="262"/>
@ -2597,6 +2602,11 @@ Nomic AI 將保留附加在您的資料上的所有署名訊息,並且您將
<source>Allow opt-out for anonymous usage statistics</source>
<translation>退使</translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="232"/>
<source>Opt-in to anonymous sharing of chats to the GPT4All Datalake</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/StartupDialog.qml" line="238"/>
<location filename="../qml/StartupDialog.qml" line="265"/>