Mirror of https://github.com/nomic-ai/gpt4all.git, synced 2025-09-07 11:30:05 +00:00
add min_p sampling parameter (#2014)
Signed-off-by: Christopher Barrera <cb@arda.tx.rr.com>
Co-authored-by: Jared Van Bortel <cebtenzzre@gmail.com>
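For context, min-p sampling keeps only the tokens whose probability is at least a fixed fraction (the new minP setting) of the most probable token's probability, then renormalizes and samples from what remains; with minP = 0 nothing is filtered. The sketch below is illustrative only and is not taken from this commit: the helper name applyMinP and the plain std::vector probability representation are assumptions, and it shows just the filtering rule that the setting controls.

#include <algorithm>
#include <vector>

// Hypothetical sketch of the min-p filtering rule (not part of this commit).
// Assumes probs holds a normalized distribution and 0.0 <= minP <= 1.0.
std::vector<double> applyMinP(std::vector<double> probs, double minP)
{
    const double maxProb = *std::max_element(probs.begin(), probs.end());
    const double threshold = minP * maxProb;

    double kept = 0.0;
    for (double &p : probs) {
        if (p < threshold)
            p = 0.0;   // token falls below the cutoff and leaves the pool
        kept += p;
    }
    for (double &p : probs)
        p /= kept;     // renormalize the surviving probability mass
    return probs;
}

The hunks shown below add only the settings plumbing for minP in MySettings; the stored key follows the same per-model pattern as topP ("model-%1/minP").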
@@ -87,6 +87,7 @@ void MySettings::restoreModelDefaults(const ModelInfo &model)
 {
     setModelTemperature(model, model.m_temperature);
     setModelTopP(model, model.m_topP);
+    setModelMinP(model, model.m_minP);
     setModelTopK(model, model.m_topK);
     setModelMaxLength(model, model.m_maxLength);
     setModelPromptBatchSize(model, model.m_promptBatchSize);
@@ -201,6 +202,13 @@ double MySettings::modelTopP(const ModelInfo &m) const
     return setting.value(QString("model-%1").arg(m.id()) + "/topP", m.m_topP).toDouble();
 }
 
+double MySettings::modelMinP(const ModelInfo &m) const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value(QString("model-%1").arg(m.id()) + "/minP", m.m_minP).toDouble();
+}
+
 void MySettings::setModelTopP(const ModelInfo &m, double p, bool force)
 {
     if (modelTopP(m) == p && !force)
@@ -216,6 +224,21 @@ void MySettings::setModelTopP(const ModelInfo &m, double p, bool force)
     emit topPChanged(m);
 }
 
+void MySettings::setModelMinP(const ModelInfo &m, double p, bool force)
+{
+    if (modelMinP(m) == p && !force)
+        return;
+
+    QSettings setting;
+    if (m.m_minP == p && !m.isClone)
+        setting.remove(QString("model-%1").arg(m.id()) + "/minP");
+    else
+        setting.setValue(QString("model-%1").arg(m.id()) + "/minP", p);
+    setting.sync();
+    if (!force)
+        emit minPChanged(m);
+}
+
 int MySettings::modelTopK(const ModelInfo &m) const
 {
     QSettings setting;