@@ -1734,17 +1734,18 @@ def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iter
         n_kv_head = self.hparams.get("num_key_value_heads")
         is_vision_tensor = "vision_tower" in name or "vision_model" in name

-        # For vision model
-        if name.startswith("language_model"):
-            name = name.replace("language_model.", "")
-        if name.startswith("model.text_model"):
-            name = name.replace("text_model.", "") # for SmolVLM
-        else:
-            name = name.replace("model.vision_tower.", "")
-            if "post_layernorm" in name and self.vision_arch != gguf.MODEL_ARCH.VISION_IDEFICS3:
-                return [] # skip post_layernorm
+        if is_vision_tensor:
+            if name.startswith("model.text_model"):
+                name = name.replace("text_model.", "") # for SmolVLM
+            else:
+                name = name.replace("model.vision_tower.", "")
+                if "post_layernorm" in name and self.vision_arch != gguf.MODEL_ARCH.VISION_IDEFICS3:
+                    return [] # skip post_layernorm

         if not is_vision_tensor:
+            if name.startswith("language_model"):
+                # language model tensors, remove the prefix
+                name = name.replace("language_model.", "")
             if name.endswith(("q_proj.weight", "q_proj.bias")):
                 data_torch = LlamaModel.permute(data_torch, n_head, n_head)
             if name.endswith(("k_proj.weight", "k_proj.bias")):
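For reference, a minimal standalone sketch of how the reworked branching renames tensors. The `rename_tensor` helper and the `is_idefics3` flag are illustrative stand-ins for the converter's method and its `vision_arch` check, not code from this change:

```python
# Illustrative sketch only: mirrors the reworked renaming logic outside the converter class.
from __future__ import annotations


def rename_tensor(name: str, is_idefics3: bool = True) -> str | None:
    is_vision_tensor = "vision_tower" in name or "vision_model" in name
    if is_vision_tensor:
        if name.startswith("model.text_model"):
            name = name.replace("text_model.", "")  # for SmolVLM
        else:
            name = name.replace("model.vision_tower.", "")
            if "post_layernorm" in name and not is_idefics3:
                return None  # skip post_layernorm
    else:
        if name.startswith("language_model"):
            # language model tensors, remove the prefix
            name = name.replace("language_model.", "")
    return name


# Language-model tensors lose the "language_model." prefix, vision tensors lose
# "model.vision_tower.", and post_layernorm is dropped for non-IDEFICS3 archs.
print(rename_tensor("language_model.model.layers.0.self_attn.q_proj.weight"))
print(rename_tensor("model.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight"))
print(rename_tensor("model.vision_tower.post_layernorm.weight", is_idefics3=False))
```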