
Commit 5e2335b

fix lint
Signed-off-by: wangxiyuan <[email protected]>
1 parent: 4c53906

File tree: 1 file changed, +20 -27 lines


vllm_ascend/models/deepseek_v2.py

Lines changed: 20 additions & 27 deletions
@@ -148,54 +148,47 @@ def __init__(
         self.enable_shared_expert_dp = ascend_config.enable_shared_expert_dp
 
         if self.q_lora_rank is not None:
-            self.q_a_proj = ReplicatedLinear(
-                self.hidden_size,
-                self.q_lora_rank,
-                bias=False,
-                quant_config=quant_config,
-                prefix=f"{prefix}.q_a_proj"
-            )
+            self.q_a_proj = ReplicatedLinear(self.hidden_size,
+                                             self.q_lora_rank,
+                                             bias=False,
+                                             quant_config=quant_config,
+                                             prefix=f"{prefix}.q_a_proj")
             self.q_a_layernorm = RMSNorm(self.q_lora_rank,
                                          eps=config.rms_norm_eps)
-            self.q_b_proj = ColumnParallelLinear(
-                q_lora_rank,
-                self.num_heads * self.qk_head_dim,
-                bias=False,
-                quant_config=quant_config,
-                prefix=f"{prefix}.q_b_proj"
-            )
+            self.q_b_proj = ColumnParallelLinear(q_lora_rank,
+                                                 self.num_heads *
+                                                 self.qk_head_dim,
+                                                 bias=False,
+                                                 quant_config=quant_config,
+                                                 prefix=f"{prefix}.q_b_proj")
         else:
-            self.q_proj = ColumnParallelLinear(
-                self.hidden_size,
-                self.num_heads * self.qk_head_dim,
-                bias=False,
-                quant_config=quant_config,
-                prefix=f"{prefix}.q_proj"
-            )
+            self.q_proj = ColumnParallelLinear(self.hidden_size,
+                                               self.num_heads *
+                                               self.qk_head_dim,
+                                               bias=False,
+                                               quant_config=quant_config,
+                                               prefix=f"{prefix}.q_proj")
 
         self.kv_a_proj_with_mqa = ReplicatedLinear(
             self.hidden_size,
             self.kv_lora_rank + self.qk_rope_head_dim,
             bias=False,
             quant_config=quant_config,
-            prefix=f"{prefix}.kv_a_proj_with_mqa"
-        )
+            prefix=f"{prefix}.kv_a_proj_with_mqa")
         self.kv_a_layernorm = RMSNorm(self.kv_lora_rank,
                                       eps=config.rms_norm_eps)
         self.kv_b_proj = ColumnParallelLinear(
             self.kv_lora_rank,
             self.num_heads * (self.qk_nope_head_dim + self.v_head_dim),
             bias=False,
             quant_config=quant_config,
-            prefix=f"{prefix}.kv_b_proj"
-        )
+            prefix=f"{prefix}.kv_b_proj")
         self.o_proj = CustomDeepseekV2RowParallelLinear(
             self.num_heads * self.v_head_dim,
             self.hidden_size,
             bias=False,
             quant_config=quant_config,
-            prefix=f"{prefix}.o_proj"
-        )
+            prefix=f"{prefix}.o_proj")
 
         if rope_scaling:
             rope_scaling["rope_type"] = 'deepseek_yarn'
