
Commit a3b6c2c

fix conflict (#10900)
1 parent 17089c8 commit a3b6c2c

1 file changed: 2 additions, 2 deletions

paddlenlp/transformers/deepseek_v2/modeling_pp.py

Lines changed: 2 additions & 2 deletions
@@ -170,15 +170,15 @@ def forward_without_residual(self, inputs):
         with paddle.no_grad():
             if self.shared_experts is not None:
                 if self.using_post_norm_recompute:
-                    shared_expert_output = fp8_mlp_fwd_norm_rc(
+                    shared_expert_output = FP8LinearFunctionBase.fp8_mlp_fwd_norm_rc(
                         hidden_states,
                         self.shared_experts.norm_weight,
                         self.shared_experts.norm_eps,
                         self.shared_experts.w1,
                         self.shared_experts.w2,
                     )
                 else:
-                    shared_expert_output = fp8_mlp_fwd(hidden_states, self.shared_experts.w1, self.shared_experts.w2)
+                    shared_expert_output = FP8LinearFunctionBase.fp8_mlp_fwd(hidden_states, self.shared_experts.w1, self.shared_experts.w2)
                 residual = residual + shared_expert_output

             self.x = hidden_states
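
For context, both updated call sites now route the shared-expert FP8 MLP forward through FP8LinearFunctionBase instead of the former free functions fp8_mlp_fwd and fp8_mlp_fwd_norm_rc. A minimal sketch of that pattern follows; the class body, the placeholder implementations, and the optional module-level aliases are assumptions for illustration only, not the actual PaddleNLP code.

```python
# Sketch only (assumed structure): FP8 MLP forward helpers exposed as
# staticmethods on a class, so call sites qualify them with the class name,
# as the diff above does. Bodies are placeholders, not the real kernels.

class FP8LinearFunctionBase:
    @staticmethod
    def fp8_mlp_fwd(hidden_states, w1, w2):
        # Real method: FP8 two-layer MLP forward over hidden_states with weights w1, w2.
        raise NotImplementedError

    @staticmethod
    def fp8_mlp_fwd_norm_rc(hidden_states, norm_weight, norm_eps, w1, w2):
        # Real method: same forward, but also takes the shared-expert norm
        # parameters (used on the post-norm-recompute path in the diff).
        raise NotImplementedError


# Hypothetical backward-compatible aliases, in case other call sites still use
# the old free-function names (an assumption, not confirmed by this commit):
fp8_mlp_fwd = FP8LinearFunctionBase.fp8_mlp_fwd
fp8_mlp_fwd_norm_rc = FP8LinearFunctionBase.fp8_mlp_fwd_norm_rc
```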
