Implement configurable context length (#1749)
@@ -188,7 +188,7 @@ public class LLModel : ILLModel
     /// <returns>true if the model was loaded successfully, false otherwise.</returns>
     public bool Load(string modelPath)
     {
-        return NativeMethods.llmodel_loadModel(_handle, modelPath);
+        return NativeMethods.llmodel_loadModel(_handle, modelPath, 2048);
     }

     protected void Destroy()
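The wrapper above still pins the context window to 2048 tokens. A minimal sketch of how the new argument could be surfaced to callers follows; the contextLength parameter and its default are hypothetical additions for illustration, not part of this commit:

    // Hypothetical overload (not in this commit): let callers choose the context
    // window instead of hardcoding 2048, forwarding it to the updated native binding.
    public bool Load(string modelPath, int contextLength = 2048)
    {
        return NativeMethods.llmodel_loadModel(_handle, modelPath, contextLength);
    }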
@@ -70,7 +70,8 @@ internal static unsafe partial class NativeMethods
     [return: MarshalAs(UnmanagedType.I1)]
     public static extern bool llmodel_loadModel(
         [NativeTypeName("llmodel_model")] IntPtr model,
-        [NativeTypeName("const char *")][MarshalAs(UnmanagedType.LPUTF8Str)] string model_path);
+        [NativeTypeName("const char *")][MarshalAs(UnmanagedType.LPUTF8Str)] string model_path,
+        [NativeTypeName("int32_t")] int n_ctx);

     [DllImport("libllmodel", CallingConvention = CallingConvention.Cdecl, ExactSpelling = true)]
@@ -39,7 +39,7 @@ public class Gpt4AllModelFactory : IGpt4AllModelFactory
         var handle = NativeMethods.llmodel_model_create2(modelPath, "auto", out error);
         _logger.LogDebug("Model created handle=0x{ModelHandle:X8}", handle);
         _logger.LogInformation("Model loading started");
-        var loadedSuccessfully = NativeMethods.llmodel_loadModel(handle, modelPath);
+        var loadedSuccessfully = NativeMethods.llmodel_loadModel(handle, modelPath, 2048);
         _logger.LogInformation("Model loading completed success={ModelLoadSuccess}", loadedSuccessfully);
         if (!loadedSuccessfully)
         {
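For reference, a minimal sketch of driving the updated binding directly with a caller-chosen context length; the model path and the 4096 value are placeholders, and the error handling is illustrative rather than taken from this commit:

    // Illustrative usage of the three-argument llmodel_loadModel shown above.
    var modelPath = "/path/to/model.gguf";  // placeholder path
    var handle = NativeMethods.llmodel_model_create2(modelPath, "auto", out var error);
    var loaded = NativeMethods.llmodel_loadModel(handle, modelPath, 4096);
    if (!loaded)
    {
        throw new InvalidOperationException($"Failed to load model at {modelPath}.");
    }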