1 parent 1606e81 commit 7b42c07
convert_hf_to_gguf.py
@@ -5058,14 +5058,19 @@ def set_vocab(self):
         self.gguf_writer.add_token_list(tokens)
         self.gguf_writer.add_token_types(toktypes)
         try:
+            tokenizer_file = self.dir_model / 'tokenizer.json'
+            if not tokenizer_file.is_file():
+                raise ValueError("tokenizer.json not found")
+
             # for https://huggingface.co/THUDM/glm-4-9b
             special_vocab = gguf.SpecialVocab(
                 self.dir_model,
                 load_merges=True,
                 n_vocab=vocab_size
             )
-
+
             self.gguf_writer.add_tokenizer_model("gpt2")
+
         except Exception as e:
             logger.warning(f'Failed to load special tokens: {e}')
             # for https://huggingface.co/THUDM/glm-4-9b-hf
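For context on the control flow: when tokenizer.json is absent from the model directory, the ValueError raised by the new guard is caught by the pre-existing except block, which logs a warning and lets conversion continue on the fallback path (the glm-4-9b-hf case noted in the trailing comment). A minimal self-contained sketch of that pattern, assuming the surrounding method shape; set_vocab_sketch and its return values are hypothetical stand-ins, not code from convert_hf_to_gguf.py:

import logging
from pathlib import Path

logger = logging.getLogger(__name__)

def set_vocab_sketch(dir_model: Path) -> str:
    """Hypothetical sketch: prefer tokenizer.json, fall back when it is missing."""
    try:
        tokenizer_file = dir_model / 'tokenizer.json'
        if not tokenizer_file.is_file():
            # Raising here routes execution to the except branch below,
            # mirroring the guard added in this commit.
            raise ValueError("tokenizer.json not found")
        return "gpt2"       # fast path: tokenizer.json present (e.g. glm-4-9b)
    except Exception as e:
        logger.warning(f'Failed to load special tokens: {e}')
        return "fallback"   # e.g. the glm-4-9b-hf path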