1 parent 0432c12 commit 2a6b6c6
paddlenlp/transformers/tokenizer_utils_base.py
@@ -1619,8 +1619,8 @@ def convert_added_tokens(obj):
             # does include a vocab file path in it. However, if the vocab file
             # path included in json does not exist, such as was deleted, to make
             # it still work, use the vocab file under this dir.
-            elif not os.path.isfile(init_kwargs[args_name]) and os.path.isfile(
-                    file_path):
+            elif not os.path.isfile(init_kwargs[args_name] or
+                                    '') and os.path.isfile(file_path):
                 init_kwargs[args_name] = file_path
         # TODO(guosheng): avoid reduplication of position args and key word args
         tokenizer = cls(*init_args, **init_kwargs)
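For context, here is a minimal sketch of the failure mode an `or ''` fallback like the one above guards against. The dictionary value and paths below are illustrative assumptions, not values taken from the commit: if the tokenizer config json loaded from a local directory carries a vocab-file entry that is None (or points to a file that was deleted), `os.path.isfile(None)` raises a TypeError, whereas `os.path.isfile('')` simply returns False, so the loader can fall back to the vocab file found under the directory.

    import os

    # Illustrative values only; in the real code init_kwargs comes from the
    # resolved tokenizer config json and file_path from resolved_vocab_files.
    init_kwargs = {"vocab_file": None}                # stale/None entry from the json
    file_path = "/tmp/my-local-tokenizer/vocab.txt"   # hypothetical file under the dir
    args_name = "vocab_file"

    # Old condition: os.path.isfile(init_kwargs[args_name]) raises TypeError on None.
    # New condition: the `or ''` fallback makes the check return False instead,
    # so the vocab file under the local dir is used when it exists.
    if not os.path.isfile(init_kwargs[args_name] or '') and os.path.isfile(file_path):
        init_kwargs[args_name] = file_path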