@@ -202,9 +202,8 @@ def quantize_model(
             self._quantize_layers(model, stats)

             return model
-
         except Exception as e:
-            self.logger.error(f"Error during quantization: {str(e)}")
+            self.logger.log_error(f"Error during quantization: {str(e)}")
             raise

     def _prepare_model(self, model: PreTrainedModel) -> PreTrainedModel:
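Every hunk in this diff makes the same rename: `self.logger.error(...)` becomes `self.logger.log_error(...)` and `self.logger.info(...)` becomes `self.logger.log_info(...)`. The logger class itself is not part of the diff, so the following is only a hedged sketch of what a wrapper with those method names might look like, assuming it is a thin shim over the standard `logging` module; the class name and constructor are invented for illustration.

```python
# Hypothetical logger wrapper matching the renamed calls in this diff.
# Only the log_info/log_error method names come from the changed lines;
# everything else here is an assumption.
import logging


class QuantizationLogger:
    def __init__(self, name: str = "quantizer", level: int = logging.INFO):
        self._logger = logging.getLogger(name)
        self._logger.setLevel(level)

    def log_info(self, message: str) -> None:
        self._logger.info(message)

    def log_error(self, message: str) -> None:
        self._logger.error(message)
```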
@@ -375,11 +374,11 @@ def export_model(
                 model.optimize()
                 onnx.save(model, path)

-            else:
+            else:
                 raise ValueError(f"Unsupported export format: {format}")

         except Exception as e:
-            self.logger.error(f"Error exporting model: {str(e)}")
+            self.logger.log_error(f"Error exporting model: {str(e)}")
             raise

     def benchmark(
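The export hunk above saves an ONNX graph and falls through to a `ValueError` for any unrecognized format. A minimal sketch of that dispatch, assuming `model` is already an `onnx.ModelProto` at this point and that `format` is a plain string; the surrounding method, any other format branches, and `model.optimize()` (a method on the author's own wrapper) are not reproduced here.

```python
# Minimal sketch of the format dispatch visible in the hunk above.
# Only the onnx.save call and the ValueError fallback come from the diff.
import onnx


def export_onnx(model: onnx.ModelProto, path: str, format: str = "onnx") -> None:
    if format == "onnx":
        onnx.save(model, path)
    else:
        raise ValueError(f"Unsupported export format: {format}")
```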
@@ -441,8 +440,8 @@ def benchmark(
                 "p99_latency": torch.quantile(latencies, 0.99).item()
             }

-        except Exception as e:
-            self.logger.error(f"Error during benchmarking: {str(e)}")
+        except Exception as e:
+            self.logger.log_error(f"Error during benchmarking: {str(e)}")
             raise

 class BaseQuantizer:
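The benchmark hunk ends with a p99 latency computed via `torch.quantile`. As a rough illustration of how such a stats dictionary can be produced, here is a hedged sketch that times repeated forward passes and reduces the samples to mean/p50/p99; the warmup count, run count, and use of `time.perf_counter` are assumptions, and only the `torch.quantile(latencies, 0.99).item()` expression mirrors the diff.

```python
# Hedged latency-benchmark sketch ending in the kind of stats dict shown above.
# Assumes synchronous (CPU) execution; CUDA timing would additionally need
# torch.cuda.synchronize() around each call.
import time
import torch


def measure_latency(model, inputs: torch.Tensor, runs: int = 100, warmup: int = 10) -> dict:
    model.eval()
    samples = []
    with torch.no_grad():
        for _ in range(warmup):
            model(inputs)  # warm up caches and lazy initialisation
        for _ in range(runs):
            start = time.perf_counter()
            model(inputs)
            samples.append(time.perf_counter() - start)

    latencies = torch.tensor(samples)
    return {
        "mean_latency": latencies.mean().item(),
        "p50_latency": torch.quantile(latencies, 0.50).item(),
        "p99_latency": torch.quantile(latencies, 0.99).item(),
    }
```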
@@ -560,15 +559,15 @@ def _prepare_model(self, original_model: PreTrainedModel):
         from transformers import AutoModelForCausalLM

         try:
-            # Create new model instance
-            self.logger.info("Creating new model instance...")
+            # Create new model instance
+            self.logger.log_info("Creating new model instance...")
             new_model = AutoModelForCausalLM.from_config(
                 self.model_config,
                 trust_remote_code=True
             )

-            # Copy state dict with proper device handling
-            self.logger.info("Copying model parameters...")
+            # Copy state dict with proper device handling
+            self.logger.log_info("Copying model parameters...")
             with torch.no_grad():
                 state_dict = {}
                 for name, param in original_model.state_dict().items():
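This hunk cuts off inside the state-dict copy loop. A hedged sketch of how that copy might continue, assuming the intent is simply to detach each parameter, move it to a target device, and load the result into the freshly constructed (and otherwise uninitialised) model; the `load_state_dict` call and the `target_device` argument do not appear in the diff and are assumptions.

```python
# Hedged continuation of the state-dict copy shown above. from_config builds
# an uninitialised model, so the copied tensors supply the trained weights.
# The diff only shows the loop header; the body and load_state_dict call are
# assumptions.
import torch
from transformers import AutoModelForCausalLM, PretrainedConfig


def copy_weights(original_model, config: PretrainedConfig, target_device: torch.device):
    new_model = AutoModelForCausalLM.from_config(config, trust_remote_code=True)
    with torch.no_grad():
        state_dict = {}
        for name, param in original_model.state_dict().items():
            # detach and move each tensor so the copy lives on the target device
            state_dict[name] = param.detach().to(target_device)
        new_model.load_state_dict(state_dict)
    return new_model.to(target_device)
```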
@@ -586,10 +585,10 @@ def _prepare_model(self, original_model: PreTrainedModel):
             new_model = new_model.to(self.device_manager.primary_device)

             self._model = new_model
-            self.logger.info("Model preparation completed successfully")
+            self.logger.log_info("Model preparation completed successfully")

-        except Exception as e:
-            self.logger.error(f"Failed to prepare model: {str(e)}")
+        except Exception as e:
+            self.logger.log_error(f"Failed to prepare model: {str(e)}")
             raise

     def prepare_calibration_data(self, calibration_data: torch.Tensor) -> torch.Tensor: