2 files changed, +2 −2

@@ -98,7 +98,7 @@ struct llama_hparams {
     float rope_freq_scale_train;
     float rope_freq_scale_train_swa;
     uint32_t n_ctx_orig_yarn;
-    float rope_yarn_log_mul;
+    float rope_yarn_log_mul = 0.0f;
 
     std::array<int, 4> rope_sections;
 
@@ -1369,7 +1369,7 @@ void llama_model::load_hparams(llama_model_loader & ml) {
         // that have no expert_gating_func model parameter set
         hparams.expert_gating_func = LLAMA_EXPERT_GATING_FUNC_TYPE_SOFTMAX;
     }
-    ml.get_key(LLM_KV_ROPE_SCALING_YARN_LOG_MUL, hparams.rope_yarn_log_mul);
+    ml.get_key(LLM_KV_ROPE_SCALING_YARN_LOG_MUL, hparams.rope_yarn_log_mul, false);
 
     switch (hparams.n_layer) {
         case 27: type = LLM_TYPE_16B; break;
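Taken together, the two hunks make the YARN log-multiplier hyperparameter optional: the field now carries an in-class default of 0.0f, and the metadata read is marked non-required, so a model file that lacks LLM_KV_ROPE_SCALING_YARN_LOG_MUL keeps the default instead of failing to load. Below is a minimal, self-contained sketch of that pattern; the key/value store, the key string, and the `get_key` helper here are hypothetical stand-ins, not the actual llama_model_loader API.

```cpp
#include <cstdio>
#include <map>
#include <stdexcept>
#include <string>

// Hypothetical stand-in for a GGUF-style metadata store.
struct kv_store {
    std::map<std::string, float> data;

    // Optional-key pattern: when 'required' is false and the key is missing,
    // 'out' is left untouched and no error is raised.
    bool get_key(const std::string & key, float & out, bool required = true) const {
        auto it = data.find(key);
        if (it == data.end()) {
            if (required) {
                throw std::runtime_error("key not found: " + key);
            }
            return false;
        }
        out = it->second;
        return true;
    }
};

struct hparams_sketch {
    float rope_yarn_log_mul = 0.0f;  // in-class default, used when the key is absent
};

int main() {
    kv_store kv;        // intentionally does not contain the YARN log-mul key
    hparams_sketch hp;

    // Non-required read: rope_yarn_log_mul stays 0.0f instead of aborting the load.
    kv.get_key("rope_scaling.yarn_log_multiplier", hp.rope_yarn_log_mul, false);
    std::printf("rope_yarn_log_mul = %f\n", hp.rope_yarn_log_mul);
    return 0;
}
```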