
Commit c90fb03

[CI/Build] Skip Mllama HF runner tests with Transformers v4.55.0 (#22659)
Signed-off-by: Isotr0py <[email protected]>
1 parent 84cf78a commit c90fb03

1 file changed: 17 additions, 0 deletions


tests/models/multimodal/generation/test_mllama.py

Lines changed: 17 additions & 0 deletions
@@ -6,6 +6,7 @@
 import pytest
 import torch
 from transformers import AutoConfig, AutoModelForImageTextToText, AutoTokenizer
+from transformers import __version__ as TRANSFORMERS_VERSION
 
 from vllm import LLM, SamplingParams
 from vllm.attention.backends.flash_attn import FlashAttentionMetadata
@@ -285,6 +286,10 @@ def clear_cache():
 @pytest.mark.parametrize("max_tokens", [128])
 @pytest.mark.parametrize("num_logprobs", [5])
 @pytest.mark.parametrize("attn_backend", LIST_ENC_DEC_SUPPORTED_BACKENDS)
+@pytest.mark.skipif(
+    TRANSFORMERS_VERSION == "4.55.0",
+    reason="Transformers v4.55.0 has a regression issue on mllama, "
+    "see: https://github.com/huggingface/transformers/pull/40083")
 def test_models_single_leading_image(hf_runner, vllm_runner, image_assets,
                                      model, sizes, dtype, max_tokens,
                                      num_logprobs,
@@ -313,6 +318,10 @@ def test_models_single_leading_image(hf_runner, vllm_runner, image_assets,
 @pytest.mark.parametrize("max_tokens", [128])
 @pytest.mark.parametrize("num_logprobs", [5])
 @pytest.mark.parametrize("attn_backend", LIST_ENC_DEC_SUPPORTED_BACKENDS)
+@pytest.mark.skipif(
+    TRANSFORMERS_VERSION == "4.55.0",
+    reason="Transformers v4.55.0 has a regression issue on mllama, "
+    "see: https://github.com/huggingface/transformers/pull/40083")
 def test_models_multi_leading_images(hf_runner, vllm_runner, image_assets,
                                      model, dtype, max_tokens, num_logprobs,
                                      attn_backend: _Backend) -> None:
@@ -362,6 +371,10 @@ def test_models_multi_leading_images(hf_runner, vllm_runner, image_assets,
 @pytest.mark.parametrize("max_tokens", [128])
 @pytest.mark.parametrize("num_logprobs", [5])
 @pytest.mark.parametrize("attn_backend", LIST_ENC_DEC_SUPPORTED_BACKENDS)
+@pytest.mark.skipif(
+    TRANSFORMERS_VERSION == "4.55.0",
+    reason="Transformers v4.55.0 has a regression issue on mllama, "
+    "see: https://github.com/huggingface/transformers/pull/40083")
 def test_models_interleaved_images(hf_runner, vllm_runner, image_assets, model,
                                    dtype, max_tokens, num_logprobs,
                                    attn_backend: _Backend) -> None:
@@ -402,6 +415,10 @@ def test_models_interleaved_images(hf_runner, vllm_runner, image_assets, model,
 @pytest.mark.parametrize("dtype", ["bfloat16"])
 @pytest.mark.parametrize("max_tokens", [64])
 @pytest.mark.parametrize("num_logprobs", [5])
+@pytest.mark.skipif(
+    TRANSFORMERS_VERSION == "4.55.0",
+    reason="Transformers v4.55.0 has a regression issue on mllama, "
+    "see: https://github.com/huggingface/transformers/pull/40083")
 def test_models_distributed(
     hf_runner,
     vllm_runner,
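
For context, a minimal standalone sketch of the version-gated skip pattern added above (the test name and body here are illustrative, not part of the commit). The marker compares the installed Transformers version string against exactly "4.55.0", so patched releases such as 4.55.1 would still run the tests.

import pytest
from transformers import __version__ as TRANSFORMERS_VERSION

@pytest.mark.skipif(
    TRANSFORMERS_VERSION == "4.55.0",
    reason="Transformers v4.55.0 has a regression issue on mllama, "
    "see: https://github.com/huggingface/transformers/pull/40083")
def test_example_gated_on_transformers_version():
    # Hypothetical test body: collected and run on every Transformers
    # release except 4.55.0, where pytest reports it as skipped with
    # the reason above.
    assert True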
