11from .filter import Filter
22from ...tokenizer import Tokenizer
3- from formatron .integrations .utils import get_original_characters , default_mask_logits_fn , get_bit_mask
4- from formatron .formatter import FormatterBuilder
5- from formatron .config import EngineGenerationConfig
6- import kbnf
73import torch
84from functools import lru_cache
95
# Formatron (and its kbnf backend) is an optional dependency: the filter
# class defined below raises at construction time when it is missing, so
# merely importing this module must not fail if formatron is absent.
try:
    import kbnf
    from formatron.integrations.utils import get_original_characters, default_mask_logits_fn, get_bit_mask
    from formatron.formatter import FormatterBuilder
    from formatron.config import EngineGenerationConfig

    formatron_available = True
except ImportError:
    # ImportError already covers ModuleNotFoundError (its subclass), so a
    # single handler replaces the previous duplicated except clauses.
    formatron_available = False
16+
17+
1018@lru_cache (10 )
1119def create_engine_vocabulary (
1220 tokenizer : Tokenizer ,
@@ -31,6 +39,9 @@ def __init__(
3139 engine_config : EngineGenerationConfig = None ,
3240 vocab_processors : list [callable ] | None = None
3341 ):
42+ if not formatron_available :
43+ raise ValueError ("Formatron package is not available." )
44+
3445 super ().__init__ (tokenizer , trigger_token , prefix_str , eos_after_completed )
3546 assert formatter_builder is not None
3647 self ._formatter = formatter_builder .build (
0 commit comments