File tree Expand file tree Collapse file tree 1 file changed +21
-0
lines changed
src/training/training_lora/classifier_model_fine_tuning_lora Expand file tree Collapse file tree 1 file changed +21
-0
lines changed Original file line number Diff line number Diff line change @@ -659,6 +659,27 @@ def merge_lora_adapter_to_full_model(
659659 )
660660 logger .info ("Created category_mapping.json" )
661661
# Emit lora_config.json alongside the merged model so the Rust router can
# detect that this checkpoint was LoRA-trained and route it to the LoRA
# inference path.
logger.info("Creating lora_config.json for LoRA model detection...")

# Modules the LoRA adapters were attached to during fine-tuning.
adapted_modules = [
    "attention.self.query",
    "attention.self.value",
    "attention.output.dense",
    "intermediate.dense",
    "output.dense",
]

# NOTE(review): rank/alpha/dropout are hard-coded here and presumably mirror
# the training hyperparameters — confirm they stay in sync with the trainer.
lora_config = {
    "rank": 16,        # LoRA rank (r)
    "alpha": 32,       # LoRA alpha scaling factor
    "dropout": 0.1,    # LoRA dropout rate
    "target_modules": adapted_modules,
}

lora_config_path = os.path.join(output_path, "lora_config.json")
with open(lora_config_path, "w") as fh:
    json.dump(lora_config, fh)
logger.info(f"Created {lora_config_path}")
662683 logger .info ("LoRA adapter merged successfully!" )
663684
664685
You can’t perform that action at this time.
0 commit comments