@@ -7997,15 +7997,13 @@ def repack_mxfp4(self, new_name: str, blocks: Tensor, scales: Tensor):
     def generate_extra_tensors(self) -> Iterable[tuple[str, Tensor]]:
         blocks0: Tensor = torch.zeros(1)
         blocks1: Tensor = torch.zeros(1)
-        found_mxfp4_tensors = False
         # we assume that tensors are loaded in the correct order
         for name, data_torch in self.get_tensors():
             if "mlp.experts.down_proj_blocks" in name:
                 blocks0 = data_torch
             elif "mlp.experts.down_proj_scales" in name:
                 new_name = self.map_tensor_name(name.replace("_scales", ".weight"))
                 self.repack_mxfp4(new_name, blocks0, data_torch)
-                found_mxfp4_tensors = True
             elif "mlp.experts.gate_up_proj_blocks" in name:
                 blocks0, blocks1 = data_torch[:, ::2, :, :], data_torch[:, 1::2, :, :]
             elif "mlp.experts.gate_up_proj_scales" in name:
@@ -8014,9 +8012,6 @@ def generate_extra_tensors(self) -> Iterable[tuple[str, Tensor]]:
                 new_name_up = self.map_tensor_name(name.replace("gate_up_proj_scales", "up_proj.weight"))
                 self.repack_mxfp4(new_name_gate, blocks0, scales0)
                 self.repack_mxfp4(new_name_up, blocks1, scales1)
-                found_mxfp4_tensors = True
-        if not found_mxfp4_tensors:
-            raise ValueError("No MXFP4 tensors found in the model. Please make sure you are using MXFP4 model.")
         return []

     def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]:
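
Note on the gate_up split that this hunk keeps: the checkpoint stores gate and up projection rows interleaved along the second dimension, which is why the converter slices with [:, ::2] and [:, 1::2] before handing each half to repack_mxfp4. A minimal sketch of that de-interleaving, using an illustrative placeholder shape rather than the real tensor dimensions:

import torch

# Placeholder shape (experts, 2 * rows, blocks, block_bytes); the actual
# dimensions depend on the checkpoint and are not taken from this diff.
data_torch = torch.arange(2 * 4 * 3 * 2).reshape(2, 4, 3, 2)

# Even rows along dim 1 feed the gate projection, odd rows the up
# projection, matching the slicing used in the diff above.
blocks_gate = data_torch[:, ::2, :, :]
blocks_up = data_torch[:, 1::2, :, :]

assert blocks_gate.shape == blocks_up.shape == (2, 2, 3, 2)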