
Commit f10fc56

rm can_be_detokenized
1 parent 51afc0a commit f10fc56

File tree

2 files changed: +7 −11 lines changed

tools/server/server.cpp

Lines changed: 1 addition & 9 deletions
@@ -2135,13 +2135,6 @@ struct server_context {
         return ret;
     }
 
-    bool can_be_detokenized(const struct llama_context * ctx, const server_tokens & inp) {
-        const llama_model * model = llama_get_model(ctx);
-        const llama_vocab * vocab = llama_model_get_vocab(model);
-        const int32_t n_vocab = llama_vocab_n_tokens(vocab);
-        return inp.validate(n_vocab);
-    }
-
     bool launch_slot_with_task(server_slot & slot, server_task && task) {
         slot.reset();
         slot.id_task = task.id;
@@ -2156,8 +2149,7 @@ struct server_context {
             slot.lora = slot.params.lora;
         }
 
-        bool can_detokenize = can_be_detokenized(ctx, slot.prompt_tokens);
-        if (!can_detokenize) {
+        if (!slot.prompt_tokens.validate(ctx)) {
             send_error(task, "Prompt contains invalid tokens", ERROR_TYPE_INVALID_REQUEST);
             return false;
         }

tools/server/utils.hpp

Lines changed: 6 additions & 2 deletions
@@ -1227,7 +1227,11 @@ struct server_tokens {
     }
 
     // make sure all text tokens are within the vocab range
-    bool validate(llama_token max_vocab_id) const {
+    bool validate(const struct llama_context * ctx) const {
+        const llama_model * model = llama_get_model(ctx);
+        const llama_vocab * vocab = llama_model_get_vocab(model);
+        const int32_t n_vocab = llama_vocab_n_tokens(vocab);
+
         for (size_t i = 0; i < tokens.size(); ++i) {
             auto & t = tokens[i];
             if (t == LLAMA_TOKEN_NULL) {
@@ -1239,7 +1243,7 @@ struct server_tokens {
             } catch (const std::exception & e) {
                 return false;
             }
-        } else if (t < 0 || t >= max_vocab_id) {
+        } else if (t < 0 || t >= n_vocab) {
             return false;
         }
     }
