Skip to content

Commit f4d62da

Browse files
authored
Update transformers-stubs (#372)
1 parent a6e4a4e commit f4d62da

File tree

3 files changed

+18
-25
lines changed

3 files changed

+18
-25
lines changed

stubs/transformers-stubs/models/auto/auto_factory.pyi

Lines changed: 8 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,15 @@
11
import os
22
from collections import OrderedDict
33
from collections.abc import Iterator
4-
from typing import Any
4+
from typing import Any, TypeVar, Union
55
from typing_extensions import TypeAlias
66

77
from transformers.configuration_utils import PretrainedConfig
88
from transformers.tokenization_utils_fast import PreTrainedTokenizerFast
99

10-
_LazyAutoMappingValue: TypeAlias = tuple[
11-
# Tokenizers will depend on packages installed, too much variance and there are no common base or Protocol
12-
type[Any | None],
13-
type[PreTrainedTokenizerFast | None],
14-
]
10+
_T = TypeVar("_T")
11+
# Tokenizers will depend on packages installed, too much variance and there are no common base or Protocol
12+
_LazyAutoMappingValue: TypeAlias = tuple[type[Any] | None, type[Any] | None]
1513

1614
CLASS_DOCSTRING: str
1715
FROM_CONFIG_DOCSTRING: str
@@ -26,7 +24,7 @@ class _BaseAutoModelClass:
2624
@classmethod
2725
def from_pretrained(cls, pretrained_model_name_or_path: str | os.PathLike[str], *model_args, **kwargs): ...
2826
@classmethod
29-
def register(cls, config_class, model_class) -> None: ...
27+
def register(cls, config_class, model_class, exist_ok=False) -> None: ...
3028

3129
def insert_head_doc(docstring, head_doc: str = ""): ...
3230
def auto_class_update(cls, checkpoint_for_example: str = "bert-base-cased", head_doc: str = ""): ...
@@ -38,10 +36,10 @@ class _LazyAutoMapping(OrderedDict[type[PretrainedConfig], _LazyAutoMappingValue
3836
def __len__(self) -> int: ...
3937
def __getitem__(self, key: type[PretrainedConfig]) -> _LazyAutoMappingValue: ...
4038
def keys(self) -> list[type[PretrainedConfig]]: ...
41-
def get(self, key: type[PretrainedConfig], default: _LazyAutoMappingValue) -> _LazyAutoMappingValue: ...
39+
def get(self, key: type[PretrainedConfig], default: _T) -> _LazyAutoMappingValue | _T: ...
4240
def __bool__(self) -> bool: ...
4341
def values(self) -> list[_LazyAutoMappingValue]: ...
4442
def items(self) -> list[tuple[type[PretrainedConfig], _LazyAutoMappingValue]]: ...
4543
def __iter__(self) -> Iterator[type[PretrainedConfig]]: ...
46-
def __contains__(self, item: object) -> bool: ...
47-
def register(self, key: type[PretrainedConfig], value: _LazyAutoMappingValue) -> None: ...
44+
def __contains__(self, item: type) -> bool: ...
45+
def register(self, key: type[PretrainedConfig], value: _LazyAutoMappingValue, exist_ok=False) -> None: ...

stubs/transformers-stubs/models/auto/configuration_auto.pyi

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,13 @@
11
import os
22
from collections import OrderedDict
33
from collections.abc import Callable, Iterator, KeysView, ValuesView
4-
from typing import Any, TypeVar
4+
from typing import Any, NoReturn, TypeVar
55

66
from transformers.configuration_utils import PretrainedConfig
77

88
_F = TypeVar("_F", bound=Callable[..., Any])
99

1010
CONFIG_MAPPING_NAMES: OrderedDict[str, str]
11-
CONFIG_ARCHIVE_MAP_MAPPING_NAMES: OrderedDict[str, str]
1211
MODEL_NAMES_MAPPING: OrderedDict[str, str]
1312
SPECIAL_MODEL_TYPE_TO_MODULE_NAME: OrderedDict[str, str]
1413

@@ -23,7 +22,7 @@ class _LazyConfigMapping(OrderedDict[str, type[PretrainedConfig]]):
2322
def items(self) -> list[tuple[str, type[PretrainedConfig]]]: ...
2423
def __iter__(self) -> Iterator[str]: ...
2524
def __contains__(self, item: object) -> bool: ...
26-
def register(self, key: str, value: type[PretrainedConfig]) -> None: ...
25+
def register(self, key: str, value: type[PretrainedConfig], exist_ok=False) -> None: ...
2726

2827
CONFIG_MAPPING: _LazyConfigMapping
2928

@@ -36,8 +35,6 @@ class _LazyLoadAllMappings(OrderedDict[str, str]):
3635
def __iter__(self) -> Iterator[str]: ...
3736
def __contains__(self, item: object) -> bool: ...
3837

39-
ALL_PRETRAINED_CONFIG_ARCHIVE_MAP: _LazyLoadAllMappings
40-
4138
def replace_list_option_in_docstrings(config_to_class=None, use_model_types: bool = True) -> Callable[[_F], _F]: ...
4239

4340
class AutoConfig:
@@ -47,4 +44,4 @@ class AutoConfig:
4744
@classmethod
4845
def from_pretrained(cls, pretrained_model_name_or_path: str | os.PathLike[str], **kwargs): ...
4946
@staticmethod
50-
def register(model_type, config) -> None: ...
47+
def register(model_type, config, exist_ok=False) -> None: ...
stubs/transformers-stubs/models/auto/tokenization_auto.pyi

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,24 @@
11
import os
22
from collections import OrderedDict
33
from typing import Any
4+
from typing_extensions import TypeAlias
45

5-
from transformers.configuration_utils import PretrainedConfig
66
from transformers.models.auto.auto_factory import _LazyAutoMapping
77
from transformers.tokenization_utils import PreTrainedTokenizer
88
from transformers.tokenization_utils_fast import PreTrainedTokenizerFast
99

1010
TOKENIZER_MAPPING_NAMES: OrderedDict[str, tuple[str | None, str | None]]
1111
TOKENIZER_MAPPING: _LazyAutoMapping
12-
CONFIG_TO_TYPE: dict[type[PretrainedConfig], str]
12+
CONFIG_TO_TYPE: dict[str, str]
1313

14-
def tokenizer_class_from_name(class_name: str) -> PreTrainedTokenizer | PreTrainedTokenizerFast: ...
14+
def tokenizer_class_from_name(class_name: str) -> type[Any] | None: ...
1515
def get_tokenizer_config(
1616
pretrained_model_name_or_path: str | os.PathLike[str],
1717
cache_dir: str | os.PathLike[str] | None = None,
1818
force_download: bool = False,
19-
resume_download: bool = False,
19+
resume_download: bool | None = None,
2020
proxies: dict[str, str] | None = None,
21-
use_auth_token: bool | str | None = None,
21+
token: bool | str | None = None,
2222
revision: str | None = None,
2323
local_files_only: bool = False,
2424
subfolder: str = "",
@@ -28,7 +28,5 @@ def get_tokenizer_config(
2828
class AutoTokenizer:
2929
def __init__(self) -> None: ...
3030
@classmethod
31-
def from_pretrained(
32-
cls, pretrained_model_name_or_path: str | os.PathLike[str], *inputs, **kwargs
33-
) -> PreTrainedTokenizer | PreTrainedTokenizerFast: ...
34-
def register(config_class, slow_tokenizer_class=None, fast_tokenizer_class=None) -> None: ...
31+
def from_pretrained(cls, pretrained_model_name_or_path: str | os.PathLike[str], *inputs, **kwargs): ...
32+
def register(config_class, slow_tokenizer_class=None, fast_tokenizer_class=None, exist_ok=False) -> None: ...

0 commit comments

Comments (0)