Commit 1934086

Remove PP judgment for lora in Qwen2/Qwen3 (#2669)
1 parent d841351 commit 1934086

File tree: 1 file changed, +4 -8 lines

paddleformers/trl/llm_utils.py (4 additions, 8 deletions)
@@ -38,10 +38,6 @@
     Glm4MoeForCausalLMPipe,
     LlamaForCausalLMPipe,
     PretrainedConfig,
-    Qwen2ForCausalLMPipe,
-    Qwen2MoeForCausalLMPipe,
-    Qwen3ForCausalLMPipe,
-    Qwen3MoeForCausalLMPipe,
 )
 from ..utils.log import logger
 
@@ -114,7 +110,7 @@ def get_lora_target_modules(model):
             ".*mlp.w2.*",
             ".*mlp.c_proj.*",
         ]
-    elif model.config.model_type == "qwen2" or isinstance(model, Qwen2ForCausalLMPipe):
+    elif model.config.model_type == "qwen2":
         target_modules = [
             ".*q_proj.*",
             ".*k_proj.*",
@@ -124,7 +120,7 @@ def get_lora_target_modules(model):
             ".*down_proj.*",
             ".*up_proj.*",
         ]
-    elif model.config.model_type == "qwen3" or isinstance(model, Qwen3ForCausalLMPipe):
+    elif model.config.model_type == "qwen3":
         target_modules = [
             ".*q_proj.*",
             ".*k_proj.*",
@@ -156,7 +152,7 @@ def get_lora_target_modules(model):
             ".*w2.*",
             ".*w3.*",
         ]
-    elif model.config.model_type == "qwen2_moe" or isinstance(model, Qwen2MoeForCausalLMPipe):
+    elif model.config.model_type == "qwen2_moe":
         target_modules = [
             ".*q_proj.*",
             ".*k_proj.*",
@@ -167,7 +163,7 @@ def get_lora_target_modules(model):
             ".*up_proj.*",
             ".*down_proj.*",
         ]
-    elif model.config.model_type == "qwen3_moe" or isinstance(model, Qwen3MoeForCausalLMPipe):
+    elif model.config.model_type == "qwen3_moe":
         target_modules = [
             ".*q_proj.*",
             ".*k_proj.*",

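After this change, the Qwen branches of get_lora_target_modules select LoRA target modules from model.config.model_type alone; the isinstance checks against the *ForCausalLMPipe pipeline-parallel classes, and their imports, are removed. Below is a minimal standalone sketch of that dispatch pattern, not the library's actual implementation: the SimpleNamespace stand-in for a model is hypothetical, and the regex lists are abridged to the patterns visible in this diff.

# Minimal sketch of the post-change dispatch: LoRA targets are chosen from
# model.config.model_type alone, with no pipeline-parallel isinstance checks.
# The SimpleNamespace "model" is a hypothetical stand-in; the regex lists are
# abridged to the patterns that appear in this diff.
from types import SimpleNamespace


def pick_lora_target_modules(model):
    model_type = model.config.model_type
    if model_type in ("qwen2", "qwen3"):
        # Dense Qwen branches (abridged pattern list).
        return [".*q_proj.*", ".*k_proj.*", ".*down_proj.*", ".*up_proj.*"]
    if model_type in ("qwen2_moe", "qwen3_moe"):
        # MoE Qwen branches (abridged pattern list).
        return [".*q_proj.*", ".*k_proj.*", ".*up_proj.*", ".*down_proj.*"]
    raise ValueError(f"unsupported model_type: {model_type}")


# Usage: a fake model carrying only the config attribute the function reads.
fake_model = SimpleNamespace(config=SimpleNamespace(model_type="qwen3"))
print(pick_lora_target_modules(fake_model))  # abridged regex list for qwen3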