From b7647436ccc80970b44a270f70f4f2ea139054d1 Mon Sep 17 00:00:00 2001
From: Alex Klinkhamer
Date: Sun, 16 Jul 2023 14:01:45 -0700
Subject: [PATCH] llama : fix t_start_sample_us initialization warning (#2238)

---
 llama.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama.cpp b/llama.cpp
index 27e1ee964..0f9d5346d 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -2205,7 +2205,7 @@ void llama_sample_classifier_free_guidance(
           struct llama_context * guidance_ctx,
                          float   scale,
                          float   smooth_factor) {
-    int64_t t_start_sample_us = t_start_sample_us = ggml_time_us();
+    int64_t t_start_sample_us = ggml_time_us();
 
     assert(ctx);
     auto n_vocab = llama_n_vocab(ctx);
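
Note (not part of the patch): the removed line repeated the variable inside its own
initializer, a pattern compilers typically report as an initialization warning. A minimal,
hypothetical sketch of the before/after, using a stand-in fake_time_us() in place of
ggml_time_us():

    // sketch.cpp -- illustrative only; names are not taken from llama.cpp
    #include <cstdint>

    static int64_t fake_time_us() { return 42; }  // stand-in for ggml_time_us()

    int main() {
        // Before: the variable appears twice in its own initializer; many
        // compilers flag this as an initialization / self-use warning.
        int64_t t_before = t_before = fake_time_us();

        // After: plain initialization with the same effect, no warning.
        int64_t t_after = fake_time_us();

        return (t_before == t_after) ? 0 : 1;
    }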