@@ -56,7 +56,7 @@ Error HFTokenizer::load(const std::string& path) {
5656 json parsed_json;
5757 try {
5858 parsed_json = json::parse (contents);
59- } catch (const json::exception& e) {
59+ } catch (const std::exception& e) {
6060 TK_LOG (Error, " Error parsing json file: %s" , e.what ());
6161 return Error::LoadFailure;
6262 }
@@ -76,7 +76,7 @@ Error HFTokenizer::load(const std::string& path) {
7676
7777 // Store for future use.
7878 special_token_map_.emplace (std::move (special_token_map));
79- } catch (const json::out_of_range & e) {
79+ } catch (const std::exception & e) {
8080 TK_LOG (Info, " Could not parse special tokens: %s" , e.what ());
8181 return Error::LoadFailure;
8282 }
@@ -96,7 +96,7 @@ Error HFTokenizer::load(const std::string& path) {
9696
9797 auto token_map = TK_UNWRAP (detail::build_token_map (std::move (token_pairs)));
9898 token_map_.emplace (std::move (token_map));
99- } catch (const json::out_of_range & e) {
99+ } catch (const std::exception & e) {
100100 TK_LOG (Info, " Could not parse tokens: %s" , e.what ());
101101 return Error::LoadFailure;
102102 }
@@ -114,7 +114,7 @@ Error HFTokenizer::load(const std::string& path) {
114114 } else {
115115 TK_LOG (Info, " Normalizer field is null, skipping" );
116116 }
117- } catch (const json::out_of_range & e) {
117+ } catch (const std::exception & e) {
118118 // No "Normalizer" field found
119119 TK_LOG (
120120 Info,
@@ -129,7 +129,7 @@ Error HFTokenizer::load(const std::string& path) {
129129 .parse_json (parsed_json.at (" pre_tokenizer" ))
130130 .create ();
131131 TK_LOG (Info, " Pretokenizer set up" );
132- } catch (const json::out_of_range & e) {
132+ } catch (const std::exception & e) {
133133 TK_LOG (Info, " Could not parse pre_tokenizer: %s" , e.what ());
134134 return Error::LoadFailure;
135135 }
@@ -138,7 +138,7 @@ Error HFTokenizer::load(const std::string& path) {
138138 try {
139139 _decoder =
140140 TokenDecoderConfig ().parse_json (parsed_json.at (" decoder" )).create ();
141- } catch (const json::out_of_range &) {
141+ } catch (const std::exception &) {
142142 // No decoder specified
143143 }
144144
@@ -192,7 +192,7 @@ Error HFTokenizer::load(const std::string& path) {
192192 " Built merge ranks map with %" PRId64 " entries" ,
193193 static_cast <int64_t >(merge_ranks.size ()));
194194 merge_ranks_.emplace (std::move (merge_ranks));
195- } catch (const json::out_of_range & e) {
195+ } catch (const std::exception & e) {
196196 TK_LOG (Error, " Could not parse merges: %s" , e.what ());
197197 return Error::LoadFailure;
198198 }
@@ -211,7 +211,7 @@ Error HFTokenizer::load(const std::string& path) {
211211 json parsed_config_json;
212212 try {
213213 parsed_config_json = json::parse (config_contents);
214- } catch (const json::exception& e) {
214+ } catch (const std::exception& e) {
215215 TK_LOG (Error, " Error parsing model config json json file: %s" , e.what ());
216216 return Error::LoadFailure;
217217 }
@@ -239,7 +239,7 @@ Error HFTokenizer::load(const std::string& path) {
239239 }
240240 bos_tok_ = *bos_res;
241241 eos_tok_ = *eos_res;
242- } catch (const json::out_of_range & e) {
242+ } catch (const std::exception & e) {
243243 TK_LOG (Error, " Could not eos/bos from tokenizer config: %s" , e.what ());
244244 return Error::LoadFailure;
245245 }
0 commit comments