Skip to content

Commit 3a83012

Browse files
author
sufubao
committed
reformat
1 parent 0184a83 commit 3a83012

File tree

2 files changed

+2
-6
lines changed

2 files changed

+2
-6
lines changed

lightllm/common/basemodel/layer_weights/meta_weights/fused_moe_weight_ep.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -359,9 +359,7 @@ def prefilled_group_gemm(
359359
######################################## warning ##################################################
360360
# This is used to match the autotune feature, making the MoE model run the same Triton kernel on different ranks.
361361
# In some special cases, one rank may receive 0 tokens, so add a token so that it still runs the Triton kernel.
362-
if (
363-
get_triton_autotune_level() in [AutotuneLevel.ADAPTIVE_AUTOTUNE, AutotuneLevel.FORCE_AUTOTUNE]
364-
):
362+
if get_triton_autotune_level() in [AutotuneLevel.ADAPTIVE_AUTOTUNE, AutotuneLevel.FORCE_AUTOTUNE]:
365363
_gemm_out_a = torch.zeros((1, N), device=device, dtype=hidden_dtype)
366364
_silu_out = torch.zeros((1, N // 2), device=device, dtype=hidden_dtype)
367365
silu_and_mul_fwd(_gemm_out_a.view(-1, N), _silu_out)

lightllm/common/fused_moe/grouped_fused_moe_ep.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -192,9 +192,7 @@ def fused_experts_impl(
192192
######################################## warning ##################################################
193193
# This is used to match the autotune feature, making the MoE model run the same Triton kernel on different ranks.
194194
# In some special cases, one rank may receive 0 tokens, so add a token so that it still runs the Triton kernel.
195-
if (
196-
get_triton_autotune_level() in [AutotuneLevel.ADAPTIVE_AUTOTUNE, AutotuneLevel.FORCE_AUTOTUNE]
197-
):
195+
if get_triton_autotune_level() in [AutotuneLevel.ADAPTIVE_AUTOTUNE, AutotuneLevel.FORCE_AUTOTUNE]:
198196
_gemm_out_a = torch.zeros((1, N), device=hidden_states.device, dtype=hidden_states.dtype)
199197
_silu_out = torch.zeros((1, N // 2), device=hidden_states.device, dtype=hidden_states.dtype)
200198
silu_and_mul_fwd(_gemm_out_a.view(-1, N), _silu_out)

0 commit comments

Comments
 (0)