grammar : revert the replacement of llama_token_to_piece with id_to_token (#4396)

commit e18f7345a3
parent fe680e3d10
Author: Xiang (Kevin) Li
Date:   2023-12-09 16:29:27 -05:00 (committed by GitHub)

llama.cpp

@@ -7503,7 +7503,7 @@ void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * c
     for (size_t i = 0; i < candidates->size; ++i) {
         const llama_token id = candidates->data[i].id;
-        const std::string & piece = ctx->model.vocab.id_to_token[id].text;
+        const std::string piece = llama_token_to_piece(ctx, id);
         if (id == eos) {
             if (!allow_eos) {
                 candidates->data[i].logit = -INFINITY;
@@ -7715,7 +7715,7 @@ void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar
         GGML_ASSERT(false);
     }

-    const std::string & piece = ctx->model.vocab.id_to_token[token].text;
+    const std::string piece = llama_token_to_piece(ctx, token);

    // Note terminating 0 in decoded string
    const auto decoded = decode_utf8(piece, grammar->partial_utf8);
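
For reference, a minimal C++ sketch (not part of the commit) of the call pattern this revert restores: converting token ids to text through the std::string llama_token_to_piece(ctx, token) helper used in the hunks above, instead of reading vocab.id_to_token[...].text directly. The tokens_to_text wrapper, the includes, and the assumption that common.h declares the two-argument overload are illustrative only.

// Sketch only: tokens_to_text is a hypothetical helper, not code from this commit.
#include <string>
#include <vector>

#include "llama.h"   // llama_token, struct llama_context
#include "common.h"  // assumed to declare:
                     // std::string llama_token_to_piece(const llama_context *, llama_token)

static std::string tokens_to_text(llama_context * ctx, const std::vector<llama_token> & tokens) {
    std::string text;
    for (const llama_token tok : tokens) {
        // Detokenize through the helper so each piece is decoded the same way
        // as in the grammar code above, rather than using the raw stored
        // vocab text from id_to_token.
        text += llama_token_to_piece(ctx, tok);
    }
    return text;
}

Routing through llama_token_to_piece keeps grammar sampling consistent with the rest of the pipeline: the grammar sees the decoded piece rather than the raw vocab entry text.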