Mirror of https://github.com/nomic-ai/gpt4all.git (synced 2025-06-22 21:48:23 +00:00)
llmodel: fix wrong and/or missing prompt callback type
Fix occurrences of the prompt callback being incorrectly specified, or the response callback's prototype being incorrectly used in its place.

Signed-off-by: Juuso Alasuutari <juuso.alasuutari@gmail.com>
parent 8204c2eb80
commit 08ece43f0d
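For context: the two callback types involved differ only in their argument lists. A minimal sketch of the distinction, mirroring the CFUNCTYPE declarations that appear in the Python hunk below (the handler functions here are illustrative, not part of the commit):

import ctypes

# Prototypes as declared in the binding (see the diff below): the prompt
# callback receives only a token id; the response callback also receives
# the generated text.
PromptCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32)
ResponseCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32, ctypes.c_char_p)

def on_prompt(token_id):
    return True  # keep processing the prompt

def on_response(token_id, response):
    print(response.decode("utf-8", errors="replace"), end="")
    return True  # keep generating

# Before this fix, the prompt handler was wrapped in ResponseCallback,
# producing a two-argument thunk for a parameter the C API declares
# (after this commit) as the one-argument llmodel_prompt_callback.
prompt_cb = PromptCallback(on_prompt)
response_cb = ResponseCallback(on_response)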
@@ -123,7 +123,7 @@ bool recalculate_wrapper(bool is_recalculating, void *user_data) {
 }
 
 void llmodel_prompt(llmodel_model model, const char *prompt,
-                    llmodel_response_callback prompt_callback,
+                    llmodel_prompt_callback prompt_callback,
                     llmodel_response_callback response_callback,
                     llmodel_recalculate_callback recalculate_callback,
                     llmodel_prompt_context *ctx)
@@ -162,7 +162,7 @@ uint64_t llmodel_restore_state_data(llmodel_model model, const uint8_t *src);
  * @param ctx A pointer to the llmodel_prompt_context structure.
  */
 void llmodel_prompt(llmodel_model model, const char *prompt,
-                    llmodel_response_callback prompt_callback,
+                    llmodel_prompt_callback prompt_callback,
                     llmodel_response_callback response_callback,
                     llmodel_recalculate_callback recalculate_callback,
                     llmodel_prompt_context *ctx);
@@ -84,12 +84,13 @@ class LLModelPromptContext(ctypes.Structure):
                 ("repeat_last_n", ctypes.c_int32),
                 ("context_erase", ctypes.c_float)]
 
+PromptCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32)
 ResponseCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32, ctypes.c_char_p)
 RecalculateCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_bool)
 
 llmodel.llmodel_prompt.argtypes = [ctypes.c_void_p,
                                    ctypes.c_char_p,
-                                   ResponseCallback,
+                                   PromptCallback,
                                    ResponseCallback,
                                    RecalculateCallback,
                                    ctypes.POINTER(LLModelPromptContext)]
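With PromptCallback listed in argtypes, ctypes can reject a wrongly typed callback before the call ever crosses into C. A pure-Python sketch of that check, using from_param, which is what ctypes applies to each argument against argtypes (no model library needed; the lambdas are placeholders):

import ctypes

PromptCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32)
ResponseCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32, ctypes.c_char_p)

ok = PromptCallback(lambda token_id: True)
bad = ResponseCallback(lambda token_id, response: True)

PromptCallback.from_param(ok)       # accepted: exact type match
try:
    PromptCallback.from_param(bad)  # a ResponseCallback is not a PromptCallback
except TypeError as exc:
    print("rejected:", exc)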
@@ -218,7 +219,7 @@ class LLModel:
 
         llmodel.llmodel_prompt(self.model,
                                prompt,
-                               ResponseCallback(self._prompt_callback),
+                               PromptCallback(self._prompt_callback),
                                ResponseCallback(self._response_callback),
                                RecalculateCallback(self._recalculate_callback),
                                context)
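One detail worth noting at this call site: ctypes callback objects must stay alive for as long as C code may invoke them. Constructing them inline, as above, is safe here because llmodel_prompt blocks until generation finishes, so the temporaries outlive the call. A sketch of the defensive alternative, pinning the thunk on an instance (the class and attribute names are illustrative, not from the commit):

import ctypes

PromptCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32)

class CallbackHolder:
    # Illustrative pattern: keep a reference to the thunk so it cannot
    # be garbage-collected while the C side might still call it.
    def __init__(self):
        self.prompt_cb = PromptCallback(self._on_prompt)

    @staticmethod
    def _on_prompt(token_id):
        return True

holder = CallbackHolder()
# pass holder.prompt_cb to the C function instead of a fresh temporary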
@@ -232,7 +233,7 @@ class LLModel:
 
     # Empty prompt callback
     @staticmethod
-    def _prompt_callback(token_id, response):
+    def _prompt_callback(token_id):
         return True
 
     # Empty response callback method that just prints response to be collected
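The corrected default now matches the one-argument prototype. Since the callbacks return a bool, returning False presumably cancels further processing; only the signature, not that semantic, is confirmed by this diff. A hypothetical replacement callback that enforces a token budget:

import ctypes

PromptCallback = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_int32)

tokens_seen = 0

def budgeted_prompt_callback(token_id):
    # Assumption: returning False aborts processing, which the bool
    # return type suggests but this diff does not spell out.
    global tokens_seen
    tokens_seen += 1
    return tokens_seen <= 2048

prompt_cb = PromptCallback(budgeted_prompt_callback)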