Mirror of https://github.com/nomic-ai/gpt4all.git, synced 2025-09-02 00:57:09 +00:00
add min_p sampling parameter (#2014)
Signed-off-by: Christopher Barrera <cb@arda.tx.rr.com>
Co-authored-by: Jared Van Bortel <cebtenzzre@gmail.com>
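For context: min_p sampling discards candidate tokens whose probability falls below min_p times the probability of the most likely token, so the new default of min_p = 0.0f (added in the hunk below) leaves sampling behavior unchanged. A minimal C++ sketch of that filtering rule, separate from this commit and using hypothetical names:

    #include <algorithm>
    #include <vector>

    struct TokenProb { int id; float p; };

    // Sketch only: keep tokens with p >= min_p * p(top token); min_p = 0.0f keeps everything.
    std::vector<TokenProb> min_p_filter(std::vector<TokenProb> probs, float min_p) {
        if (probs.empty() || min_p <= 0.0f)
            return probs;
        float max_p = 0.0f;
        for (const auto &tp : probs)
            max_p = std::max(max_p, tp.p);
        const float cutoff = min_p * max_p;  // threshold scales with the strongest candidate
        probs.erase(std::remove_if(probs.begin(), probs.end(),
                                   [cutoff](const TokenProb &tp) { return tp.p < cutoff; }),
                    probs.end());
        return probs;
    }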
@@ -248,6 +248,7 @@ Napi::Value NodeModelWrapper::GetRequiredMemory(const Napi::CallbackInfo& info)
         .n_predict = 128,
         .top_k = 40,
         .top_p = 0.9f,
+        .min_p = 0.0f,
         .temp = 0.72f,
         .n_batch = 8,
         .repeat_penalty = 1.0f,
@@ -277,6 +278,8 @@ Napi::Value NodeModelWrapper::GetRequiredMemory(const Napi::CallbackInfo& info)
         promptContext.top_k = inputObject.Get("top_k").As<Napi::Number>().Int32Value();
     if(inputObject.Has("top_p"))
         promptContext.top_p = inputObject.Get("top_p").As<Napi::Number>().FloatValue();
+    if(inputObject.Has("min_p"))
+        promptContext.min_p = inputObject.Get("min_p").As<Napi::Number>().FloatValue();
     if(inputObject.Has("temp"))
         promptContext.temp = inputObject.Get("temp").As<Napi::Number>().FloatValue();
     if(inputObject.Has("n_batch"))
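The two added lines follow the same pattern as the existing optional fields: the default is only overridden when the key is present on the options object. A hedged sketch of that pattern as a reusable helper (hypothetical, not part of this commit):

    #include <napi.h>

    // Hypothetical helper: read an optional float property, falling back to a default.
    static float GetFloatOr(const Napi::Object &obj, const char *key, float fallback) {
        return obj.Has(key) ? obj.Get(key).As<Napi::Number>().FloatValue() : fallback;
    }

    // e.g. promptContext.min_p = GetFloatOr(inputObject, "min_p", 0.0f);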