@@ -221,6 +221,18 @@ def word_tokenize(
 
     segments = []
 
+    if custom_dict and engine in (
+        "attacut",
+        "icu",
+        "nercut",
+        "sefr_cut",
+        "tltk",
+        "oskut"
+    ):
+        raise NotImplementedError(
+            f"The {engine} engine does not support custom dictionaries."
+        )
+
     if engine in ("newmm", "onecut"):
         from pythainlp.tokenize.newmm import segment
 
@@ -230,10 +242,6 @@ def word_tokenize(
 
         segments = segment(text, custom_dict, safe_mode=True)
     elif engine == "attacut":
-        if custom_dict:
-            raise NotImplementedError(
-                f"The {engine} engine does not support custom dictionaries."
-            )
         from pythainlp.tokenize.attacut import segment
 
         segments = segment(text)
@@ -254,42 +262,22 @@ def word_tokenize(
         else:
             segments = segment(text)
     elif engine == "icu":
-        if custom_dict:
-            raise NotImplementedError(
-                f"The {engine} engine does not support custom dictionaries."
-            )
         from pythainlp.tokenize.pyicu import segment
 
         segments = segment(text)
     elif engine == "nercut":
-        if custom_dict:
-            raise NotImplementedError(
-                f"The {engine} engine does not support custom dictionaries."
-            )
         from pythainlp.tokenize.nercut import segment
 
         segments = segment(text)
     elif engine == "sefr_cut":
-        if custom_dict:
-            raise NotImplementedError(
-                f"The {engine} engine does not support custom dictionaries."
-            )
         from pythainlp.tokenize.sefr_cut import segment
 
         segments = segment(text)
     elif engine == "tltk":
-        if custom_dict:
-            raise NotImplementedError(
-                f"The {engine} engine does not support custom dictionaries."
-            )
         from pythainlp.tokenize.tltk import segment
 
         segments = segment(text)
     elif engine == "oskut":
-        if custom_dict:
-            raise NotImplementedError(
-                f"The {engine} engine does not support custom dictionaries."
-            )
         from pythainlp.tokenize.oskut import segment
 
         segments = segment(text)
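The effect of the consolidated guard is that an unsupported engine/custom_dict combination now fails once, up front, before any engine module is imported. A minimal usage sketch, assuming pythainlp is installed (with the attacut extra for the second call) and the `word_tokenize`/`dict_trie` interfaces shown in the diff:

```python
# Sketch of the behavior introduced by the upfront custom_dict check above.
from pythainlp.tokenize import word_tokenize
from pythainlp.util import dict_trie

custom_words = dict_trie(dict_source=["ภาษาไทย"])  # small example dictionary

# newmm supports custom dictionaries, so this tokenizes normally.
print(word_tokenize("ภาษาไทยง่ายนิดเดียว", engine="newmm", custom_dict=custom_words))

# attacut is in the unsupported list, so the consolidated guard raises
# NotImplementedError before pythainlp.tokenize.attacut is imported.
try:
    word_tokenize("ภาษาไทยง่ายนิดเดียว", engine="attacut", custom_dict=custom_words)
except NotImplementedError as exc:
    print(exc)  # The attacut engine does not support custom dictionaries.
```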