@@ -300,15 +300,17 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
             try:
                 inject_adapter_in_model(lora_config, self, adapter_name=adapter_name, **peft_kwargs)
                 incompatible_keys = set_peft_model_state_dict(self, state_dict, adapter_name, **peft_kwargs)
-            except RuntimeError as e:
-                for module in self.modules():
-                    if isinstance(module, BaseTunerLayer):
-                        active_adapters = module.active_adapters
-                        for active_adapter in active_adapters:
-                            if adapter_name in active_adapter:
-                                module.delete_adapter(adapter_name)
-
-                self.peft_config.pop(adapter_name)
+            except Exception as e:
+                # In case `inject_adapter_in_model()` failed even before `peft_config` was set on the model.
+                if getattr(self, "peft_config", None) is not None:
+                    for module in self.modules():
+                        if isinstance(module, BaseTunerLayer):
+                            active_adapters = module.active_adapters
+                            for active_adapter in active_adapters:
+                                if adapter_name in active_adapter:
+                                    module.delete_adapter(adapter_name)
+
+                    self.peft_config.pop(adapter_name)
                 logger.error(f"Loading {adapter_name}\n{e}")
                 raise

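Catching bare `Exception` (instead of only `RuntimeError`) together with the `getattr` guard means the cleanup path no longer assumes `peft_config` exists: `inject_adapter_in_model()` can fail before it ever sets that attribute, and the old code would then raise `AttributeError` during cleanup and mask the original error. A minimal, self-contained sketch of the pattern, where `DummyModel`, `inject_adapter`, and `load_adapter` are hypothetical stand-ins for the real peft calls (`inject_adapter_in_model()`, `set_peft_model_state_dict()`, `BaseTunerLayer.delete_adapter()`):

```python
class DummyModel:
    """Stand-in model: `peft_config` only exists once injection succeeds."""

    def inject_adapter(self, adapter_name, fail_early):
        if fail_early:
            # Fails *before* peft_config is created -- the case the new
            # getattr() guard protects against.
            raise ValueError("config validation failed")
        self.peft_config = {adapter_name: {"r": 8}}
        raise RuntimeError("state dict loading failed after injection")


def load_adapter(model, adapter_name, fail_early):
    try:
        model.inject_adapter(adapter_name, fail_early)
    except Exception as e:
        # Without the guard, a pre-injection failure would raise
        # AttributeError here and hide the original exception.
        if getattr(model, "peft_config", None) is not None:
            model.peft_config.pop(adapter_name)
        print(f"Loading {adapter_name} failed:\n{e}")
        raise


for fail_early in (True, False):
    try:
        load_adapter(DummyModel(), "default", fail_early)
    except Exception as exc:
        print(f"cleanly re-raised: {exc!r}")
```

In both failure modes the original exception is logged and re-raised intact; cleanup only runs when there is actually adapter state to undo.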