diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp
index c47ed3e47a76d..335ec79f6af64 100644
--- a/examples/server/utils.hpp
+++ b/examples/server/utils.hpp
@@ -341,9 +341,9 @@ static std::string llama_get_chat_template(const struct llama_model * model) {
     if (res < 0) {
         return "";
     } else {
-        std::vector<char> model_template(res, 0);
+        std::vector<char> model_template(res + 1, 0);
         llama_model_meta_val_str(model, template_key.c_str(), model_template.data(), model_template.size());
-        return std::string(model_template.data(), model_template.size());
+        return std::string(model_template.data(), model_template.size() - 1);
     }
 }
 
diff --git a/include/llama.h b/include/llama.h
index 90791d5f5ea12..cd592aa179d4f 100644
--- a/include/llama.h
+++ b/include/llama.h
@@ -451,6 +451,7 @@ extern "C" {
     // Functions to access the model's GGUF metadata scalar values
     // - The functions return the length of the string on success, or -1 on failure
     // - The output string is always null-terminated and cleared on failure
+    // - When retrieving a string, an extra byte must be allocated to account for the null terminator
     // - GGUF array values are not supported by these functions
 
     // Get metadata value as a string by key name
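
For context, the new comment in llama.h spells out the allocation contract the server fix relies on: query the length first, then allocate length + 1 bytes so `llama_model_meta_val_str` has room for the terminator it writes, and trim that terminator when building the result. A minimal caller-side sketch, mirroring the patched `llama_get_chat_template` helper (the `get_meta_str` name is hypothetical, and a valid, already-loaded `llama_model` handle is assumed):

```cpp
#include <string>
#include <vector>

#include "llama.h"

// Hypothetical helper: read one GGUF metadata string, honoring the
// "allocate an extra byte for the null terminator" contract.
static std::string get_meta_str(const struct llama_model * model, const char * key) {
    // Passing NULL/0 returns the string length, or -1 on failure.
    int32_t res = llama_model_meta_val_str(model, key, NULL, 0);
    if (res < 0) {
        return "";
    }
    // res + 1 bytes: the API null-terminates its output.
    std::vector<char> buf(res + 1, 0);
    llama_model_meta_val_str(model, key, buf.data(), buf.size());
    // Drop the terminator so the std::string holds only the payload.
    return std::string(buf.data(), buf.size() - 1);
}
```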