
Commit 3328341 (1 parent: d2bd709)

Commit message: fix

1 file changed (+3, -3)


lightllm/common/fused_moe/grouped_fused_moe.py

@@ -118,7 +118,7 @@ def moe_align1_kernel(
     experts_topk_weight_stride0,
     experts_topk_weight_stride1,
     TOKEN_BLOCK_SIZE: tl.constexpr,
-    num_stages: tl.constexpr,
+    range_num_stages: tl.constexpr,
 ):
 
     expert_id = tl.program_id(axis=0)
@@ -127,7 +127,7 @@ def moe_align1_kernel(
 
     pre_sum = 0
 
-    for start_loc in tl.range(0, experts_info_n, TOKEN_BLOCK_SIZE, num_stages=num_stages):
+    for start_loc in tl.range(0, experts_info_n, TOKEN_BLOCK_SIZE, num_stages=range_num_stages):
         n_range = start_loc + off_n
         topk_weights_data = tl.load(topk_weights + n_range, mask=n_range < experts_info_n, other=0)
         expert_data = tl.load(
@@ -213,7 +213,7 @@ def moe_align1(
         experts_weight_info.stride(0),
         experts_weight_info.stride(1),
         TOKEN_BLOCK_SIZE=TOKEN_BLOCK_SIZE,
-        num_stages=4,
+        range_num_stages=4,
         num_warps=8,
         num_stages=1,
     )
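Why the rename: at the launch site, num_stages=1 is a Triton kernel-launch meta-parameter that controls software pipelining of the whole kernel, while the num_stages keyword of tl.range() controls pipelining of just that loop. Routing both knobs through the same name collides, so the kernel argument becomes range_num_stages. Below is a minimal, self-contained sketch of the same pattern; it is not the lightllm source, and the kernel name, tensor sizes, and data are illustrative assumptions.

import torch
import triton
import triton.language as tl


@triton.jit
def _block_sum_kernel(
    x_ptr,
    out_ptr,
    n_elements,
    BLOCK_SIZE: tl.constexpr,
    range_num_stages: tl.constexpr,  # pipelining depth for the tl.range loop only
):
    offs = tl.arange(0, BLOCK_SIZE)
    acc = tl.zeros((BLOCK_SIZE,), dtype=tl.float32)
    # tl.range takes its own num_stages keyword; naming the kernel argument
    # range_num_stages keeps it distinct from the launch meta-parameter below.
    for start in tl.range(0, n_elements, BLOCK_SIZE, num_stages=range_num_stages):
        idx = start + offs
        acc += tl.load(x_ptr + idx, mask=idx < n_elements, other=0.0)
    tl.store(out_ptr + offs, acc)


x = torch.randn(4096, device="cuda")
out = torch.empty(1024, device="cuda")
# range_num_stages is an ordinary constexpr argument; num_stages=1 here is the
# kernel-level launch meta-parameter, mirroring the patched moe_align1 call.
_block_sum_kernel[(1,)](x, out, x.numel(), BLOCK_SIZE=1024, range_num_stages=4, num_warps=8, num_stages=1)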
