python: load templates from model files, and add legacy template warning

Signed-off-by: Jared Van Bortel <jared@nomic.ai>
This commit is contained in:
Jared Van Bortel
2024-12-05 14:35:48 -05:00
parent 2db59f0092
commit d6638b5064
4 changed files with 95 additions and 16 deletions

View File

@@ -312,6 +312,8 @@ int32_t llmodel_count_prompt_tokens(llmodel_model model, const char *prompt, con
// Invokes `callback` once for each (name, token) special-token pair exposed by the loaded model.
void llmodel_model_foreach_special_token(llmodel_model model, llmodel_special_token_callback callback);
// Reads the chat template stored in the model file at `model_path`.
// Returns a pointer to library-owned storage on success, or nullptr on failure;
// on failure *error (if non-null) is pointed at a descriptive message.
// NOTE(review): the returned pointer is only valid until the next call — see the definition.
const char *llmodel_model_chat_template(const char *model_path, const char **error);
#ifdef __cplusplus
}
#endif

View File

@@ -34,11 +34,11 @@ llmodel_model llmodel_model_create(const char *model_path)
return fres;
}
static void llmodel_set_error(const char **errptr, const char *message)
// Records `message` as the caller-visible error string.
// The message is kept in thread-local storage so the pointer written to
// *errptr stays valid until this thread reports another error.
// A null `errptr` means the caller doesn't want the message; it is dropped.
static void llmodel_set_error(const char **errptr, std::string message)
{
    thread_local static std::string last_error_message;
    if (!errptr)
        return;
    last_error_message = std::move(message);
    *errptr = last_error_message.c_str();
}
@@ -318,3 +318,15 @@ void llmodel_model_foreach_special_token(llmodel_model model, llmodel_special_to
for (auto &[name, token] : wrapper->llModel->specialTokens())
callback(name.c_str(), token.c_str());
}
// Reads the chat template from the model file at `model_path`.
// On success returns a pointer to a buffer owned by this thread, valid until
// this thread's next call to this function. On failure returns nullptr and,
// if `error` is non-null, points *error at a descriptive message.
const char *llmodel_model_chat_template(const char *model_path, const char **error)
{
    // thread_local, matching llmodel_set_error's error buffer: a plain static
    // would let concurrent callers race on one shared string and invalidate
    // each other's returned pointer mid-read.
    thread_local static std::string s_chatTemplate;
    auto res = LLModel::Implementation::chatTemplate(model_path);
    if (res) {
        s_chatTemplate = std::move(*res); // take the string; avoid a copy
        return s_chatTemplate.c_str();
    }
    llmodel_set_error(error, std::move(res.error()));
    return nullptr;
}