From 2796953257ee5383fa7c8fe8fa8fc888c048fb0b Mon Sep 17 00:00:00 2001 From: crasm Date: Sun, 17 Dec 2023 14:37:01 -0500 Subject: [PATCH] Revert "Fail test if model file is missing" This reverts commit 32ebd525bf7e5a87ee8a3dbaab3d92ce79fbf23d. --- tests/test-model-load-cancel.cpp | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/tests/test-model-load-cancel.cpp b/tests/test-model-load-cancel.cpp index cb3c012b9..ff24a5955 100644 --- a/tests/test-model-load-cancel.cpp +++ b/tests/test-model-load-cancel.cpp @@ -1,20 +1,8 @@ #include "llama.h" -#include <cstdio> #include <cstdlib> int main(void) { - auto model_path = "models/7B/ggml-model-f16.gguf"; - auto file = fopen(model_path, "r"); - - if (file == nullptr) { - fprintf(stderr, "no model at '%s' found\n", model_path); - return EXIT_FAILURE; - } else { - fprintf(stderr, "using '%s'\n", model_path); - fclose(file); - } - llama_backend_init(false); auto params = llama_model_params{}; params.use_mmap = false; @@ -22,7 +10,7 @@ int main(void) { (void) ctx; return progress > 0.50; }; - auto * model = llama_load_model_from_file(model_path, params); + auto * model = llama_load_model_from_file("models/7B/ggml-model-f16.gguf", params); llama_backend_free(); return model == nullptr ? EXIT_SUCCESS : EXIT_FAILURE; }