llmodel: fix wrong and/or missing prompt callback type

Fix occurrences of the prompt callback being incorrectly specified, or
the response callback's prototype being incorrectly used in its place.

Signed-off-by: Juuso Alasuutari <juuso.alasuutari@gmail.com>
Author:    Juuso Alasuutari
Date:      2023-05-21 22:43:45 +03:00
Committer: AT
Commit:    08ece43f0d
Parent:    8204c2eb80

3 changed files with 6 additions and 5 deletions
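
For context: the bug class here is a prototype/arity mismatch around ctypes.CFUNCTYPE. Before this change the bindings wrapped the prompt callback in the response callback's two-argument prototype; that was self-consistent on the Python side, but the C library invokes the prompt callback with a single int32, so the generated thunk would read whatever happened to sit in the second argument slot. The Python-visible variant of the same mismatch is easy to reproduce; below is a minimal sketch reusing the prototype names from the diff (the call values are illustrative, and the direct calls stand in for the C caller):

    import ctypes

    # Prototypes as defined in the fixed bindings: the prompt callback gets
    # only a token id; the response callback also gets the generated bytes.
    PromptCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32)
    ResponseCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32, ctypes.c_char_p)

    def prompt_cb(token_id):
        return True

    # Correct pairing: the wrapper's prototype matches the function's arity.
    good = PromptCallback(prompt_cb)
    print(good(42))         # -> True

    # Mismatched pairing: wrapping the one-argument function in the
    # two-argument prototype constructs without complaint, but every
    # invocation fails. ctypes reports the TypeError on stderr and
    # substitutes a zeroed (False) return value instead of propagating
    # the exception to the caller.
    bad = ResponseCallback(prompt_cb)
    print(bad(42, b"tok"))  # -> False, with a traceback printed to stderr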

@@ -84,12 +84,13 @@ class LLModelPromptContext(ctypes.Structure):
                 ("repeat_last_n", ctypes.c_int32),
                 ("context_erase", ctypes.c_float)]

+PromptCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32)
 ResponseCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32, ctypes.c_char_p)
 RecalculateCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_bool)

 llmodel.llmodel_prompt.argtypes = [ctypes.c_void_p,
                                    ctypes.c_char_p,
-                                   ResponseCallback,
+                                   PromptCallback,
                                    ResponseCallback,
                                    RecalculateCallback,
                                    ctypes.POINTER(LLModelPromptContext)]
@@ -218,7 +219,7 @@ class LLModel:
         llmodel.llmodel_prompt(self.model,
                                prompt,
-                               ResponseCallback(self._prompt_callback),
+                               PromptCallback(self._prompt_callback),
                                ResponseCallback(self._response_callback),
                                RecalculateCallback(self._recalculate_callback),
                                context)
@@ -232,7 +233,7 @@ class LLModel:

     # Empty prompt callback
     @staticmethod
-    def _prompt_callback(token_id, response):
+    def _prompt_callback(token_id):
         return True

     # Empty response callback method that just prints response to be collected