1 parent 47926b7 · commit 9423e8a
tests/llmcompressor/transformers/tracing/test_models.py

@@ -3,6 +3,7 @@
 import pytest
 from transformers import (
     AutoModelForCausalLM,
+    Cohere2VisionForConditionalGeneration,
     Gemma3ForConditionalGeneration,
     Idefics3ForConditionalGeneration,
     Llama4ForConditionalGeneration,
@@ -12,7 +13,6 @@
     Qwen2_5_VLForConditionalGeneration,
     Qwen2VLForConditionalGeneration,
     WhisperForConditionalGeneration,
-    Cohere2VisionForConditionalGeneration,
 )
 
 from llmcompressor.pipelines.sequential.helpers import match_modules
@@ -94,8 +94,7 @@
             ["Cohere2DecoderLayer"],
             "vision",
             [],
-        )
-        (
+        )(
             "Qwen/Qwen2-VL-2B-Instruct",
 
             ["Qwen2VLDecoderLayer"],