We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent aa1240a commit 93ba573
src/axolotl/monkeypatch/lora_kernels.py
@@ -134,6 +134,11 @@ def get_attention_cls_from_config(cfg: DictDefault) -> Type[nn.Module]:
134
135
return Qwen2Attention
136
137
+ if model_type == "qwen3_vl":
138
+ from transformers.models.qwen3_vl.modeling_qwen3_vl import Qwen3VLTextAttention
139
+
140
+ return Qwen3VLTextAttention
141
142
if model_type == "mllama":
143
from transformers.models.mllama.modeling_mllama import MllamaTextSelfAttention
144
0 commit comments