Skip to content

Commit cf5b70c

Browse files
committed
Fix typing
1 parent 1671309 commit cf5b70c

File tree

2 files changed

+6
-7
lines changed

2 files changed

+6
-7
lines changed

convert_hf_to_gguf.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
import logging
88
import argparse
99
import contextlib
10-
import importlib.util
1110
import json
1211
import os
1312
import re
@@ -31,7 +30,7 @@
3130
import gguf
3231
from gguf.vocab import MistralTokenizerType, MistralVocab
3332

34-
if importlib.util.find_spec("mistral_common") is not None:
33+
try:
3534
from mistral_common.tokens.tokenizers.base import TokenizerVersion # pyright: ignore[reportMissingImports]
3635
from mistral_common.tokens.tokenizers.multimodal import DATASET_MEAN as _MISTRAL_COMMON_DATASET_MEAN, DATASET_STD as _MISTRAL_COMMON_DATASET_STD # pyright: ignore[reportMissingImports]
3736
from mistral_common.tokens.tokenizers.tekken import Tekkenizer # pyright: ignore[reportMissingImports]
@@ -41,7 +40,7 @@
4140

4241
_mistral_common_installed = True
4342
_mistral_import_error_msg = ""
44-
else:
43+
except ImportError:
4544
_MISTRAL_COMMON_DATASET_MEAN = (0.48145466, 0.4578275, 0.40821073)
4645
_MISTRAL_COMMON_DATASET_STD = (0.26862954, 0.26130258, 0.27577711)
4746

gguf-py/gguf/vocab.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,12 +14,12 @@
1414
SentencePieceProcessor = None
1515

1616
try:
17-
from mistral_common.tokens.tokenizers.mistral import MistralTokenizer
18-
from mistral_common.tokens.tokenizers.tekken import Tekkenizer
19-
from mistral_common.tokens.tokenizers.utils import (
17+
from mistral_common.tokens.tokenizers.mistral import MistralTokenizer # pyright: ignore[reportMissingImports]
18+
from mistral_common.tokens.tokenizers.tekken import Tekkenizer # pyright: ignore[reportMissingImports]
19+
from mistral_common.tokens.tokenizers.utils import ( # pyright: ignore[reportMissingImports]
2020
_filter_valid_tokenizer_files,
2121
)
22-
from mistral_common.tokens.tokenizers.sentencepiece import (
22+
from mistral_common.tokens.tokenizers.sentencepiece import ( # pyright: ignore[reportMissingImports]
2323
SentencePieceTokenizer,
2424
)
2525
except ImportError:

0 commit comments

Comments (0)