We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent a631456 · commit 4fdc52f
convert_hf_to_gguf.py
@@ -4937,7 +4937,6 @@ def set_vocab(self):
4937
from transformers import AutoTokenizer
4938
tokenizer = AutoTokenizer.from_pretrained(self.dir_model, trust_remote_code=True)
4939
special_vocab = gguf.SpecialVocab(self.dir_model, load_merges=True)
4940
- vocab_size = self.hparams["vocab_size"]
4941
tokens, toktypes, tokpre = self.get_vocab_base()
4942
self.gguf_writer.add_tokenizer_model("gpt2")
4943
self.gguf_writer.add_tokenizer_pre(tokpre)
0 commit comments