Skip to content

Commit bb4d9c3

Browse files
committed
remove further `# type: ignore` comments (stub `from_pretrained` methods now return `Self`)
1 parent 9756d30 commit bb4d9c3

File tree

2 files changed

+14
-14
lines changed

2 files changed

+14
-14
lines changed

stubs/transformers/__init__.pyi

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,26 @@
11
from typing import Any
22

3+
from typing_extensions import Self
4+
35
from . import modeling_utils, processing_utils, tokenization_utils
46
from .modeling_utils import PreTrainedModel
57
from .processing_utils import ProcessorMixin
68
from .tokenization_utils import PreTrainedTokenizer
79

810
class AutoModelForCausalLM(PreTrainedModel):
911
@classmethod
10-
def from_pretrained(cls, *args: Any, **kwargs: Any) -> PreTrainedModel: ...
12+
def from_pretrained(cls, *args: Any, **kwargs: Any) -> Self: ...
1113

1214
class AutoTokenizer(PreTrainedTokenizer):
1315
@classmethod
14-
def from_pretrained(cls, *args: Any, **kwargs: Any) -> PreTrainedTokenizer: ...
16+
def from_pretrained(cls, *args: Any, **kwargs: Any) -> Self: ...
1517

1618
class AutoProcessor(ProcessorMixin):
1719
@classmethod
18-
def from_pretrained(cls, *args: Any, **kwargs: Any) -> ProcessorMixin: ...
20+
def from_pretrained(cls, *args: Any, **kwargs: Any) -> Self: ...
1921

2022
class LlavaForConditionalGeneration(PreTrainedModel):
2123
@classmethod
22-
def from_pretrained(cls, *args: Any, **kwargs: Any) -> PreTrainedModel: ...
24+
def from_pretrained(cls, *args: Any, **kwargs: Any) -> Self: ...
2325

2426
def from_pretrained(*args: Any, **kwargs: Any) -> Any: ...

tests/models/test_outlines.py

Lines changed: 8 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -122,11 +122,11 @@ async def generate_stream(
122122

123123
@pytest.fixture
124124
def transformers_model() -> OutlinesModel:
125-
hf_model = transformers.AutoModelForCausalLM.from_pretrained( # type: ignore
125+
hf_model = transformers.AutoModelForCausalLM.from_pretrained(
126126
'erwanf/gpt2-mini',
127127
device_map='cpu',
128128
)
129-
hf_tokenizer = transformers.AutoTokenizer.from_pretrained('erwanf/gpt2-mini') # type: ignore
129+
hf_tokenizer = transformers.AutoTokenizer.from_pretrained('erwanf/gpt2-mini')
130130
chat_template = '{% for message in messages %}{{ message.role }}: {{ message.content }}{% endfor %}'
131131
hf_tokenizer.chat_template = chat_template
132132
outlines_model = outlines.models.transformers.from_transformers( # type: ignore[reportUnknownMemberType]
@@ -138,13 +138,11 @@ def transformers_model() -> OutlinesModel:
138138

139139
@pytest.fixture
140140
def transformers_multimodal_model() -> OutlinesModel:
141-
hf_model = transformers.LlavaForConditionalGeneration.from_pretrained( # type: ignore
141+
hf_model = transformers.LlavaForConditionalGeneration.from_pretrained(
142142
'trl-internal-testing/tiny-LlavaForConditionalGeneration',
143143
device_map='cpu',
144144
)
145-
hf_processor = transformers.AutoProcessor.from_pretrained( # type: ignore
146-
'trl-internal-testing/tiny-LlavaForConditionalGeneration'
147-
)
145+
hf_processor = transformers.AutoProcessor.from_pretrained('trl-internal-testing/tiny-LlavaForConditionalGeneration')
148146
outlines_model = outlines.models.transformers.from_transformers( # type: ignore[reportUnknownMemberType]
149147
hf_model,
150148
hf_processor, # type: ignore
@@ -198,11 +196,11 @@ def binary_image() -> BinaryImage:
198196
pytest.param(
199197
'from_transformers',
200198
lambda: (
201-
transformers.AutoModelForCausalLM.from_pretrained( # type: ignore
199+
transformers.AutoModelForCausalLM.from_pretrained(
202200
'erwanf/gpt2-mini',
203201
device_map='cpu',
204202
),
205-
transformers.AutoTokenizer.from_pretrained('erwanf/gpt2-mini'), # type: ignore
203+
transformers.AutoTokenizer.from_pretrained('erwanf/gpt2-mini'),
206204
),
207205
marks=skip_if_transformers_imports_unsuccessful,
208206
),
@@ -257,11 +255,11 @@ def test_init(model_loading_function_name: str, args: Callable[[], tuple[Any]])
257255
pytest.param(
258256
'from_transformers',
259257
lambda: (
260-
transformers.AutoModelForCausalLM.from_pretrained( # type: ignore
258+
transformers.AutoModelForCausalLM.from_pretrained(
261259
'erwanf/gpt2-mini',
262260
device_map='cpu',
263261
),
264-
transformers.AutoTokenizer.from_pretrained('erwanf/gpt2-mini'), # type: ignore
262+
transformers.AutoTokenizer.from_pretrained('erwanf/gpt2-mini'),
265263
),
266264
marks=skip_if_transformers_imports_unsuccessful,
267265
),

0 commit comments

Comments
 (0)