Skip to content

Commit 7e3ee1a

Browse files
committed
Update
1 parent 5ddf55e commit 7e3ee1a

File tree

1 file changed: +2 additions, −3 deletions

dlib/tokenizer/bpe_tokenizer.h

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -191,7 +191,6 @@ namespace dlib
191191
// Merge pairs in order of their merge priority
192192
while (!pq.empty()) {
193193
const auto& top_element = pq.top();
194-
int merge_order = top_element.first;
195194
const std::pair<int, int>& pair = top_element.second;
196195
pq.pop();
197196

@@ -239,7 +238,7 @@ namespace dlib
239238
std::string decode(const std::vector<int>& ids, bool display_special_tokens = true)
240239
{
241240
std::vector<uint8_t> bytes;
242-
size_t vocab_size = get_vocab_size();
241+
int vocab_size = static_cast<int>(get_vocab_size());
243242
for (int id : ids)
244243
{
245244
if (id < vocab_size)
@@ -271,7 +270,7 @@ namespace dlib
271270
int nb_merges = tok.merges.size();
272271
dlib::serialize(nb_merges, out);
273272
for (int idx = (BASE_VOCAB_SIZE + (int)tok.special_tokens.size());
274-
idx < (tok.vocab_size + tok.special_tokens.size()); ++idx)
273+
idx < (tok.vocab_size + (int)tok.special_tokens.size()); ++idx)
275274
{
276275
for (const auto& merge_pair : tok.merges)
277276
{

0 commit comments

Comments
 (0)