1 file changed: +5 −0

@@ -412,6 +412,7 @@ void llama_model::load_hparams(llama_model_loader & ml) {
     ml.get_key(LLM_KV_BLOCK_COUNT,          hparams.n_layer);
     ml.get_key(LLM_KV_EXPERT_COUNT,         hparams.n_expert,        false);
     ml.get_key(LLM_KV_EXPERT_USED_COUNT,    hparams.n_expert_used,   false);
+    ml.get_key(LLM_KV_EXPERT_WEIGHTS_SCALE, hparams.expert_weights_scale, false);

     if (arch == LLM_ARCH_WAVTOKENIZER_DEC) {
         ml.get_key(LLM_KV_FEATURES_LENGTH, hparams.n_embd_features);
@@ -3677,6 +3678,10 @@ void llama_model::print_info() const {
         LLAMA_LOG_INFO("%s: f_attention_scale = %f\n", __func__, hparams.f_attention_scale);
     }

+    if (arch == LLM_ARCH_LLAMA) {
+        LLAMA_LOG_INFO("%s: expert_weights_scale = %.1f\n", __func__, hparams.expert_weights_scale);
+    }
+
     vocab.print_info();
 }

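The diff itself only loads and logs the new `expert_weights_scale` hyperparameter. For context, a value like this is typically applied as a constant multiplier on the routing weights of the selected experts in an MoE layer. The sketch below is a minimal, self-contained illustration of that idea; the `route_expert_weights` helper, its shapes, and the sample values are assumptions for demonstration only, not the actual llama.cpp graph-building code.

```cpp
// Illustrative sketch (not llama.cpp code): softmax the router logits,
// keep the top n_expert_used experts, renormalize over them, then apply
// the constant expert_weights_scale to the resulting routing weights.
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <numeric>
#include <vector>

std::vector<float> route_expert_weights(const std::vector<float> & logits,
                                        int   n_expert_used,
                                        float expert_weights_scale) {
    // softmax over the router logits
    std::vector<float> probs(logits.size());
    const float max_logit = *std::max_element(logits.begin(), logits.end());
    float sum = 0.0f;
    for (size_t i = 0; i < logits.size(); ++i) {
        probs[i] = std::exp(logits[i] - max_logit);
        sum += probs[i];
    }
    for (float & p : probs) { p /= sum; }

    // indices of the top-k experts by probability
    std::vector<size_t> idx(probs.size());
    std::iota(idx.begin(), idx.end(), 0);
    std::partial_sort(idx.begin(), idx.begin() + n_expert_used, idx.end(),
                      [&](size_t a, size_t b) { return probs[a] > probs[b]; });

    // renormalize over the selected experts, then scale; all others get weight 0
    std::vector<float> weights(probs.size(), 0.0f);
    float topk_sum = 0.0f;
    for (int k = 0; k < n_expert_used; ++k) { topk_sum += probs[idx[k]]; }
    for (int k = 0; k < n_expert_used; ++k) {
        weights[idx[k]] = probs[idx[k]] / topk_sum * expert_weights_scale;
    }
    return weights;
}

int main() {
    const std::vector<float> router_logits = {0.1f, 2.0f, -1.0f, 0.5f};
    const auto w = route_expert_weights(router_logits, /*n_expert_used=*/2,
                                        /*expert_weights_scale=*/2.5f);
    for (size_t i = 0; i < w.size(); ++i) {
        std::printf("expert %zu: weight %.4f\n", i, w[i]);
    }
    return 0;
}
```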