Skip to content

Commit 4267d3e

Browse files
CISCam17an
authored and committed
add missing norm topk bias
1 parent 03792ad commit 4267d3e

File tree

1 file changed

+2
-4
lines changed

1 file changed

+2
-4
lines changed

src/llama-graph.cpp

Lines changed: 2 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1006,10 +1006,8 @@ ggml_tensor * llm_graph_context::build_moe_ffn(
10061006
ggml_tensor * weights_sum = ggml_sum_rows(ctx0, weights); // [1, n_tokens]
10071007
cb(weights_sum, "ffn_moe_weights_sum", il);
10081008

1009-
if (arch == LLM_ARCH_BAILINGMOE2) {
1010-
weights_sum = ggml_scale_bias(ctx0, weights_sum, 1.0, 1e-20);
1011-
cb(weights_sum, "ffn_moe_weights_sum_biased", il);
1012-
}
1009+
weights_sum = ggml_scale_bias(ctx0, weights_sum, 1.0, 1e-20);
1010+
cb(weights_sum, "ffn_moe_weights_sum_biased", il);
10131011

10141012
weights = ggml_div(ctx0, weights, weights_sum); // [n_expert_used, n_tokens]
10151013
cb(weights, "ffn_moe_weights_norm", il);

0 commit comments

Comments (0)