@@ -1752,7 +1752,7 @@ class Mistral3Model(LlamaModel):
 
     # we need to merge the text_config into the root level of hparams
     def __init__(self, *args, **kwargs):
-        hparams = Model.load_hparams(kwargs["dir_model"])
+        hparams = kwargs["hparams"] if "hparams" in kwargs else Model.load_hparams(args[0])
         if "text_config" in hparams:
             hparams = {**hparams, **hparams["text_config"]}
         kwargs["hparams"] = hparams
@@ -3385,7 +3385,7 @@ class Gemma3Model(Model):
 
     # we need to merge the text_config into the root level of hparams
    def __init__(self, *args, **kwargs):
-        hparams = Model.load_hparams(kwargs["dir_model"])
+        hparams = kwargs["hparams"] if "hparams" in kwargs else Model.load_hparams(args[0])
         if "text_config" in hparams:
             hparams = {**hparams, **hparams["text_config"]}
         kwargs["hparams"] = hparams
@@ -5358,7 +5358,7 @@ def main() -> None:
             logger.error(f"Model {model_architecture} is not supported")
             sys.exit(1)
 
-        model_instance = model_class(dir_model=dir_model, ftype=output_type, fname_out=fname_out,
+        model_instance = model_class(dir_model, output_type, fname_out,
                                      is_big_endian=args.bigendian, use_temp_file=args.use_temp_file,
                                      eager=args.no_lazy,
                                      metadata_override=args.metadata, model_name=args.model_name,
0 commit comments