From ea3a0ad6b6b5ca4693b94acd4cb32e2803f66fae Mon Sep 17 00:00:00 2001
From: xloem <0xloem@gmail.com>
Date: Mon, 1 May 2023 08:58:51 -0400
Subject: [PATCH] llama : update stubs for systems without mmap and mlock
 (#1266)

Co-authored-by: John Doe
---
 llama-util.h | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/llama-util.h b/llama-util.h
index 5f9f70ecc..d531588d5 100644
--- a/llama-util.h
+++ b/llama-util.h
@@ -243,7 +243,8 @@ struct llama_mmap {
 #else
     static constexpr bool SUPPORTED = false;
 
-    llama_mmap(struct llama_file *) {
+    llama_mmap(struct llama_file *, bool prefetch = true) {
+        (void)prefetch;
         throw std::string("mmap not supported");
     }
 #endif
@@ -382,8 +383,13 @@ struct llama_mlock {
 #else
     static constexpr bool SUPPORTED = false;
 
-    void raw_lock(const void * addr, size_t size) {
+    size_t lock_granularity() {
+        return (size_t) 65536;
+    }
+
+    bool raw_lock(const void * addr, size_t size) {
         fprintf(stderr, "warning: mlock not supported on this system\n");
+        return false;
     }
 
     void raw_unlock(const void * addr, size_t size) {}
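
Note (not part of the patch): the following is a minimal, self-contained C++ sketch of the fallback-stub pattern the hunks above modify. The names stub_mmap, stub_mlock, fake_file, and the main() driver are hypothetical stand-ins invented for illustration; only the member bodies mirror the patched "#else" branches (a constructor that accepts and ignores prefetch, and raw_lock now reporting failure by returning false).

// Sketch of the unsupported-platform stubs, assuming stand-in names.
// The real llama-util.h selects a POSIX/Win32 implementation at compile
// time; this models only the "#else" branch touched by the patch.
#include <cstddef>
#include <cstdio>
#include <string>

struct fake_file;  // hypothetical stand-in for struct llama_file

struct stub_mmap {
    static constexpr bool SUPPORTED = false;

    // Accepts the prefetch flag so call sites compile unchanged on
    // platforms without mmap, then deliberately ignores it.
    stub_mmap(fake_file *, bool prefetch = true) {
        (void) prefetch;
        throw std::string("mmap not supported");
    }
};

struct stub_mlock {
    static constexpr bool SUPPORTED = false;

    // Fixed page-size-like granularity, as in the patched stub.
    size_t lock_granularity() { return (size_t) 65536; }

    // Returns false so callers can detect that locking failed instead
    // of silently assuming success.
    bool raw_lock(const void *, size_t) {
        fprintf(stderr, "warning: mlock not supported on this system\n");
        return false;
    }

    void raw_unlock(const void *, size_t) {}
};

int main() {
    stub_mlock lock;
    if (!lock.raw_lock(nullptr, 0)) {
        fprintf(stderr, "falling back to unlocked memory\n");
    }

    try {
        stub_mmap map(nullptr, /*prefetch=*/true);
        (void) map;  // never reached; the stub constructor always throws
    } catch (const std::string & err) {
        fprintf(stderr, "mmap fallback: %s\n", err.c_str());
    }
    return 0;
}

Design note: returning false from raw_lock lets callers on unsupported platforms notice the failure and continue with unlocked memory, while accepting (and discarding) prefetch keeps the stub constructor's signature in sync with the mmap-capable implementations.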