# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# Standard
from typing import List, Optional
import json
import os

# Third Party
from tokenizers import Tokenizer

# Local
from .base import TokenizerBase


class HFTokenizer(TokenizerBase):
    """
    Wrapper around the Hugging Face `tokenizers` library for compatibility
    with the TokenizerBase API
    """

    def __init__(self, file_path: str):
        # If the path is a directory, look for "tokenizer.json", which is
        # standard for transformers checkpoints, and also look for the
        # "tokenizer_config.json" file to parse eos/bos tokens
        if os.path.isdir(file_path):
            tokenizer_path = os.path.join(file_path, "tokenizer.json")
            tokenizer_config_path = os.path.join(file_path, "tokenizer_config.json")
        else:
            tokenizer_path = file_path
            tokenizer_config_path = os.path.join(
                os.path.dirname(file_path), "tokenizer_config.json"
            )
        # The config file is optional, so only keep the path if it exists
        if not os.path.isfile(tokenizer_config_path):
            tokenizer_config_path = None

        # Load the tokenizer itself
        self._tokenizer = Tokenizer.from_file(tokenizer_path)

        # If available, parse bos/eos tokens from the tokenizer config
        self._bos_id, self._eos_id = None, None
        if tokenizer_config_path is not None:
            with open(tokenizer_config_path, "r") as handle:
                tok_config = json.load(handle)
            bos_token = tok_config.get("bos_token")
            eos_token = tok_config.get("eos_token")
            # Some configs store these as {"content": "<token>", ...} dicts
            # rather than plain strings, so handle both forms
            if isinstance(bos_token, dict):
                bos_token = bos_token.get("content")
            if isinstance(eos_token, dict):
                eos_token = eos_token.get("content")
            if bos_token is not None:
                self._bos_id = self._tokenizer.token_to_id(bos_token)
            if eos_token is not None:
                self._eos_id = self._tokenizer.token_to_id(eos_token)

        # If no eos/bos tokens were found, go looking for them!
        if None in [self._bos_id, self._eos_id]:
            tok_content = json.loads(self._tokenizer.to_str())
            if self._bos_id is None:
                self._bos_id = self._look_for_special_token(tok_content, ["begin", "text"])
            if self._eos_id is None:
                self._eos_id = self._look_for_special_token(tok_content, ["end", "text"])

        assert None not in [self._bos_id, self._eos_id], "Unable to find BOS/EOS tokens"
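
    # For reference, the fallback lookup in _look_for_special_token below
    # relies on the structure that Tokenizer.to_str() serializes for added
    # tokens (a minimal sketch; token ids are illustrative and real files
    # carry additional fields):
    #
    #   {
    #     "added_tokens": [
    #       {"id": 1, "content": "<|begin_of_text|>", "special": true},
    #       {"id": 2, "content": "<|end_of_text|>", "special": true}
    #     ]
    #   }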

    @staticmethod
    def _look_for_special_token(tok_content: dict, search_strs: List[str]) -> Optional[int]:
        """Search the serialized tokenizer's added tokens for a special token
        whose content contains all of the given search strings. The id is
        returned only if exactly one candidate matches.
        """
        candidate_toks = tok_content.get("added_tokens", [])
        for search_str in search_strs:
            candidate_toks = [
                tok for tok in candidate_toks
                if tok["special"] and search_str in tok["content"]
            ]
        if len(candidate_toks) == 1:
            return candidate_toks[0]["id"]
        return None

    def encode(
        self,
        s: str,
        *,
        bos: bool = False,
        eos: bool = False,
    ) -> List[int]:
        res = self._tokenizer.encode(s, add_special_tokens=bos).ids
        # Only append the eos token if it isn't already present
        if eos and (not res or res[-1] != self._eos_id):
            res.append(self._eos_id)
        return res

    def decode(self, ids: List[int]) -> str:
        return self._tokenizer.decode(ids)

    def bos_id(self) -> int:
        return self._bos_id

    def eos_id(self) -> int:
        return self._eos_id
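
# Example usage (a minimal sketch; the checkpoint path is hypothetical and is
# assumed to contain a "tokenizer.json" and, optionally, a
# "tokenizer_config.json" from a Hugging Face checkpoint):
#
#   tokenizer = HFTokenizer("/path/to/checkpoint")
#   ids = tokenizer.encode("Hello, world!", bos=True, eos=True)
#   text = tokenizer.decode(ids)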