@@ -7147,28 +7147,10 @@ def set_gguf_parameters(self):
             raise ValueError(f"Unsupported scoring_func value: {self.hparams['scoring_func']}")
 
         block_count = self.find_hparam(["num_hidden_layers", "n_layer"])
-        n_embd = self.find_hparam(["hidden_size", "n_embd"])
-        n_head = self.find_hparam(["num_attention_heads", "n_head"])
-        n_head_kv = self.find_hparam(["num_key_value_heads", "n_head_kv"])
-        rms_eps = self.find_hparam(["rms_norm_eps"])
-        max_pos_embds = self.find_hparam(["n_positions", "max_position_embeddings"])
-        head_dim = self.find_hparam(["head_dim"])
-
-        self.gguf_writer.add_context_length(max_pos_embds)
-        self.gguf_writer.add_embedding_length(n_embd)
-        self.gguf_writer.add_feed_forward_length(self.find_hparam(["intermediate_size"]))
         self.gguf_writer.add_expert_feed_forward_length(self.find_hparam(["intermediate_size"]))
-        self.gguf_writer.add_expert_count(self.find_hparam(["num_local_experts"]))
-        self.gguf_writer.add_expert_used_count(self.find_hparam(["num_experts_per_tok"]))
         self.gguf_writer.add_block_count(block_count)
-        self.gguf_writer.add_head_count(n_head)
-        self.gguf_writer.add_head_count_kv(n_head_kv)
-        self.gguf_writer.add_layer_norm_rms_eps(rms_eps)
-        self.gguf_writer.add_layer_norm_eps(rms_eps)
-        self.gguf_writer.add_key_length(head_dim)
-        self.gguf_writer.add_value_length(head_dim)
         self.gguf_writer.add_rope_dimension_count(self.find_hparam(["rotary_dim"]))
-        self.gguf_writer.add_rope_freq_base(self.find_hparam(["rope_theta"]))
+        super().set_gguf_parameters()
 
     def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None):
         if name.endswith("e_score_correction_bias"):