@@ -1778,15 +1778,10 @@ def load_model_hook(models, input_dir):
         if not args.enable_t5_ti:
             # pure textual inversion - only clip
             if pure_textual_inversion:
-                params_to_optimize = [
-                    text_parameters_one_with_lr,
-                ]
+                params_to_optimize = [text_parameters_one_with_lr]
                 te_idx = 0
             else:  # regular te training or regular pivotal for clip
-                params_to_optimize = [
-                    transformer_parameters_with_lr,
-                    text_parameters_one_with_lr,
-                ]
+                params_to_optimize = [transformer_parameters_with_lr, text_parameters_one_with_lr]
                 te_idx = 1
         elif args.enable_t5_ti:
             # pivotal tuning of clip & t5
@@ -1809,9 +1804,7 @@ def load_model_hook(models, input_dir):
                 ]
                 te_idx = 1
     else:
-        params_to_optimize = [
-            transformer_parameters_with_lr,
-        ]
+        params_to_optimize = [transformer_parameters_with_lr]
 
     # Optimizer creation
     if not (args.optimizer.lower() == "prodigy" or args.optimizer.lower() == "adamw"):
@@ -1871,7 +1864,6 @@ def load_model_hook(models, input_dir):
             params_to_optimize[-1]["lr"] = args.learning_rate
         optimizer = optimizer_class(
             params_to_optimize,
-            lr=args.learning_rate,
             betas=(args.adam_beta1, args.adam_beta2),
             beta3=args.prodigy_beta3,
             weight_decay=args.adam_weight_decay,
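
Context on the last hunk: the script sets the learning rate on the final parameter group directly (params_to_optimize[-1]["lr"] = args.learning_rate), so also passing a top-level lr= to the optimizer is redundant. A minimal sketch of the underlying PyTorch behavior, with plain AdamW standing in for Prodigy and made-up modules in place of the transformer and text-encoder LoRA parameters:

import torch

# Hypothetical stand-ins for the transformer and text-encoder parameters.
transformer = torch.nn.Linear(8, 8)
text_encoder = torch.nn.Linear(8, 8)

params_to_optimize = [
    {"params": transformer.parameters(), "lr": 1e-4},
    {"params": text_encoder.parameters()},  # no "lr" key yet
]
# Set the last group's lr explicitly, as the script does for Prodigy.
params_to_optimize[-1]["lr"] = 1e-4

# No top-level lr= needed: each group already carries its own "lr",
# which overrides the optimizer-level default.
optimizer = torch.optim.AdamW(params_to_optimize)
print([group["lr"] for group in optimizer.param_groups])  # -> [0.0001, 0.0001]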