
Commit a38c3fd

used triton grouped topk as default.
1 parent 383e5e2 commit a38c3fd

File tree

1 file changed: +3 -2 lines changed


lightllm/common/fused_moe/topk_select.py

Lines changed: 3 additions & 2 deletions
@@ -102,13 +102,14 @@ def select_experts(
     scoring_func: str = "softmax",
     custom_routing_function: Optional[Callable] = None,
 ):
-    from lightllm.common.fused_moe.topk_select import fused_topk, grouped_topk
+    from lightllm.common.fused_moe.topk_select import fused_topk
+    from lightllm.common.fused_moe.grouped_topk import triton_grouped_topk

     # DeekSeekv2 uses grouped_top_k
     if use_grouped_topk:
         assert topk_group is not None
         assert num_expert_group is not None
-        topk_weights, topk_ids = grouped_topk(
+        topk_weights, topk_ids = triton_grouped_topk(
             hidden_states=hidden_states,
             gating_output=router_logits,
             correction_bias=correction_bias,
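For context on the routine being swapped in: grouped top-k routing (the DeepSeek-V2 style referenced in the code comment) first restricts each token to its highest-scoring expert groups and then picks the final top-k experts inside those groups. The Triton kernel triton_grouped_topk itself is not part of this diff; the sketch below is only a plain-PyTorch reference of that selection logic under a softmax scoring assumption, with a hypothetical name grouped_topk_reference and without the correction_bias / scoring_func handling visible at the call site.

import torch

def grouped_topk_reference(
    gating_output: torch.Tensor,  # [num_tokens, num_experts] router logits
    topk: int,
    num_expert_group: int,
    topk_group: int,
    renormalize: bool = True,
):
    # Hypothetical reference, not the kernel used by this commit.
    scores = torch.softmax(gating_output.float(), dim=-1)
    num_tokens, num_experts = scores.shape

    # Score each expert group by its strongest expert.
    group_scores = scores.view(num_tokens, num_expert_group, -1).max(dim=-1).values

    # Keep only the top `topk_group` groups per token.
    group_idx = torch.topk(group_scores, k=topk_group, dim=-1).indices
    group_mask = torch.zeros_like(group_scores)
    group_mask.scatter_(1, group_idx, 1.0)

    # Zero out experts that fall outside the selected groups.
    score_mask = (
        group_mask.unsqueeze(-1)
        .expand(num_tokens, num_expert_group, num_experts // num_expert_group)
        .reshape(num_tokens, num_experts)
    )
    masked_scores = scores.masked_fill(score_mask == 0, 0.0)

    # Final top-k over the remaining experts.
    topk_weights, topk_ids = torch.topk(masked_scores, k=topk, dim=-1)
    if renormalize:
        topk_weights = topk_weights / topk_weights.sum(dim=-1, keepdim=True)
    return topk_weights, topk_ids.to(torch.int32)

A fused Triton kernel for this step avoids materializing the intermediate group mask and the repeated top-k passes in eager PyTorch, which is the likely motivation for making it the default here.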
