@@ -184,15 +184,18 @@ def onnx_embed_image(self, images: list[ImageInput], **kwargs: Any) -> OnnxOutpu
         if isinstance(processed[0], list):
             encoded, attention_mask, metadata = self._process_nested_patches(processed)
         else:
-            encoded, attention_mask, metadata = self._process_flat_images(processed, len(images))
+            encoded, attention_mask, metadata = self._process_flat_images(
+                processed,  # type: ignore[arg-type]
+                len(images),
+            )

         onnx_input = {"pixel_values": encoded, "attention_mask": attention_mask}
         onnx_input = self._preprocess_onnx_image_input(onnx_input, **kwargs)
         model_output = self.model.run(None, onnx_input)  # type: ignore[union-attr]

         return OnnxOutputContext(
             model_output=model_output[0],
-            attention_mask=attention_mask,
+            attention_mask=attention_mask,  # type: ignore[arg-type]
             metadata=metadata,
         )

@@ -231,7 +234,7 @@ def _process_nested_patches(
                 attention_mask[i, j] = 1

         metadata = {"patch_counts": patch_counts}
-        return encoded, attention_mask, metadata
+        return encoded, attention_mask, metadata  # type: ignore[return-value]

     def _process_flat_images(
         self, processed: list[NumpyArray], num_images: int
@@ -269,7 +272,7 @@ def _process_flat_images(
             attention_mask = np.ones((num_images, 1), dtype=np.int64)
             metadata = {"patch_counts": [1] * num_images}

-        return encoded, attention_mask, metadata
+        return encoded, attention_mask, metadata  # type: ignore[return-value]

     def _needs_patch_dimension(self) -> bool:
         """
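The targeted ignores above follow a common mypy pattern: `isinstance(processed[0], list)` narrows the type of the element, not of `processed` itself, so on the `else` branch `processed` is still the union of flat and nested list types and the call site needs `# type: ignore[arg-type]`. The `return-value` ignores in the two `_process_*` helpers are presumably the same idea applied to the returned tuple, where an element's inferred type does not match the declared return annotation. A minimal sketch of the arg-type case (the names `NumpyArray`, `process_flat`, and `embed` here are illustrative, not the project's API):

    from __future__ import annotations

    import numpy as np
    from numpy.typing import NDArray

    # Illustrative alias; the project's actual NumpyArray alias may differ.
    NumpyArray = NDArray[np.float32]


    def process_flat(items: list[NumpyArray], n: int) -> NumpyArray:
        # Stack the first n flat image arrays into one batch.
        return np.stack(items[:n])


    def embed(processed: list[NumpyArray] | list[list[NumpyArray]]) -> NumpyArray:
        if isinstance(processed[0], list):
            raise NotImplementedError("nested-patch path omitted in this sketch")
        # isinstance() narrowed processed[0], not processed, so mypy still sees
        # the union here and reports an arg-type error; the ignore silences it.
        return process_flat(processed, len(processed))  # type: ignore[arg-type]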