diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index b4c4d0a20..5b7e4139d 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -1502,7 +1502,7 @@ struct llama_server_context
     {
         for (auto & slot : slots)
         {
-            const bool has_prompt = slot.prompt.is_array() || (slot.prompt.is_string() && !slot.prompt.get<std::string>().empty());
+            const bool has_prompt = slot.prompt.is_array() || (slot.prompt.is_string() && !slot.prompt.get<std::string>().empty()) || !slot.images.empty();

             // empty prompt passed -> release the slot and send empty response
             if (slot.state == IDLE && slot.command == LOAD_PROMPT && !has_prompt)
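
For context, here is a minimal standalone sketch of the patched condition (not part of the patch itself). It assumes `slot.prompt` is an `nlohmann::json` value, as in `server.cpp`; the `slot_stub` struct and the free `has_prompt()` helper are hypothetical stand-ins for the real `llama_client_slot` and its processing loop. With the added `!slot.images.empty()` term, a request that carries only image data and an empty text prompt is no longer classified as an empty prompt.

```cpp
// Minimal sketch of the new has_prompt check. `slot_stub` and `has_prompt()`
// are illustrative stand-ins; only the boolean expression mirrors the diff.
#include <iostream>
#include <string>
#include <vector>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

struct slot_stub {
    json prompt;                      // text prompt: a string or an array of segments
    std::vector<std::string> images;  // stand-in for the slot's decoded image payloads
};

static bool has_prompt(const slot_stub & slot) {
    // Same condition as the patched line in server.cpp.
    return slot.prompt.is_array()
        || (slot.prompt.is_string() && !slot.prompt.get<std::string>().empty())
        || !slot.images.empty();
}

int main() {
    slot_stub text_only  { json("hello"), {} };
    slot_stub image_only { json(""), { "base64-image-bytes" } };
    slot_stub empty      { json(""), {} };

    std::cout << has_prompt(text_only)  << "\n"; // 1
    std::cout << has_prompt(image_only) << "\n"; // 1 (before the patch: 0, slot released with empty response)
    std::cout << has_prompt(empty)      << "\n"; // 0 -> empty-prompt path still taken
    return 0;
}
```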