@@ -9254,16 +9254,27 @@ class GLM4VMoEVisionModel(MmprojModel):

    ref: [#16600](https://github.com/ggml-org/llama.cpp/pull/16600)"""
    #
    # TODO: this is not complete yet!
    #
92599259 def set_gguf_parameters (self ):
92609260 super ().set_gguf_parameters ()
9261- self .gguf_writer .add_clip_projector_type (gguf .VisionProjectorType .GLM4V )
9261+ self .gguf_writer .add_clip_projector_type (gguf .VisionProjectorType .GLM4V )
92629262 self .gguf_writer .add_vision_use_gelu (True )
9263+
92639264 if (ln_eps := self .find_vparam (["layer_norm_eps" ], optional = True )) is not None :
92649265 self .gguf_writer .add_vision_attention_layernorm_eps (ln_eps )
92659266
9267+ # the ViT in GLM-4.5V applies its own RoPE inside its attention blocks
9268+ if (rope_theta := self .find_vparam (["rope_theta" ], optional = True )) is not None :
9269+ self .gguf_writer .add_vision_rope_freq_base (rope_theta )
9270+ logger .info (f"gguf: vision rope theta = { rope_theta } " )
9271+ else :
9272+ logger .warning ('gguf: -------------------------------------------------------------' )
9273+ logger .warning ('gguf: missing vision rope theta! the conversion might be incorrect!' )
9274+ logger .warning ('gguf: -------------------------------------------------------------' )
9275+
92669276 def modify_tensors (self , data_torch : Tensor , name : str , bid : int | None ) -> Iterable [tuple [str , Tensor ]]:
9277+ del bid # unused
92679278 if name .startswith ("model.visual." ):
92689279 yield self .map_tensor_name (name ), data_torch
92699280 else :
0 commit comments