55from pathlib import Path
66from typing import List
77
8- import pytest
98import torch
109from _torch .helpers import create_mock_engine
1110from parameterized import parameterized
1211from transformers import AutoProcessor , AutoTokenizer , Qwen2_5_VLConfig
1312from transformers import \
1413 Qwen2_5_VLForConditionalGeneration as HFQwen2_5_VLForConditionalLM
14+ from utils .llm_data import llm_models_root
1515
1616import tensorrt_llm
1717from tensorrt_llm ._torch .attention_backend .utils import get_attention_backend
2828from tensorrt_llm .inputs .multimodal import MultimodalParams
2929from tensorrt_llm .mapping import Mapping
3030
31-
def llm_models_root() -> str:
    """Return the root directory containing the LLM model checkpoints.

    The path is taken from the ``LLM_MODELS_ROOT`` environment variable;
    when that variable is unset, a default scratch location is used.

    NOTE(review): the original docstring claimed the function asserts when
    the variable is set to an invalid path, but no such validation exists
    in the body -- callers must handle a missing/invalid directory
    themselves. The docstring here is corrected to match actual behavior.

    Returns:
        str: the configured (or default) models root path.
    """
    default_root = os.path.join("/scratch.trt_llm_data", "llm-models")
    # os.environ.get returns the default when the variable is absent;
    # an empty-string value is returned as-is, matching the original code.
    return os.environ.get("LLM_MODELS_ROOT", default_root)
39-
40-
4131QWEN2_5_VL_7B_CONFIG = {
4232 "architectures" : ["Qwen2_5_VLForConditionalGeneration" ],
4333 "attention_dropout" : 0.0 ,
@@ -222,7 +212,8 @@ def get_hf_inputs(self, model, modality: str, device: torch.device,
222212 images = [input ['multi_modal_data' ]['image' ] for input in inputs ]
223213 elif modality == "video" :
224214 videos = [
225- input ['multi_modal_data' ][f'{ modality } ' ] for input in inputs
215+ input ['multi_modal_data' ][f'{ modality } ' ][0 ].frames
216+ for input in inputs
226217 ]
227218 elif modality == "text" :
228219 # For text-only modality, no images or videos needed
@@ -240,7 +231,6 @@ def get_hf_inputs(self, model, modality: str, device: torch.device,
240231 ).to (device )
241232 return processor_inputs
242233
243- @pytest .mark .skip (reason = "https://nvbugs/5550722" )
244234 def test_qwen2_5_vl_sanity (self ):
245235
246236 config_dict = deepcopy (QWEN2_5_VL_7B_CONFIG )
@@ -359,7 +349,6 @@ def test_qwen2_5_vl_sanity(self):
359349 use_cuda_graph = False ,
360350 disable_fuse_rope = False ),
361351 ])
362- @pytest .mark .skip (reason = "https://nvbugs/5550722" )
363352 @torch .no_grad ()
364353 def test_qwen2_5_vl_allclose_to_hf (self , scenario : Scenario ) -> None :
365354 """
0 commit comments