pytorch_optimizer/experimental (1 file changed: +2, -6 lines)

@@ -9,7 +9,7 @@ def deberta_v3_large_lr_scheduler(
     model: nn.Module,
     head_param_start: int = 390,
     base_lr: float = 2e-5,
-    head_lr: Optional[float] = None,
+    head_lr: float = 1e-4,
     wd: float = 1e-2,
 ) -> PARAMETERS:
     """DeBERTa-v3 large layer-wise lr scheduler
@@ -28,11 +28,7 @@ def deberta_v3_large_lr_scheduler(
 
     regressor_group = [params for (_, params) in regressor_parameters]
 
-    parameters = []
-    if head_lr is not None:
-        parameters.append({'params': regressor_group, 'lr': head_lr})
-    else:
-        parameters.append({'params': regressor_group})
+    parameters = [{'params': regressor_group, 'lr': head_lr}]
 
     layer_low_threshold: int = 195  # start of the 12 layers
     layer_middle_threshold: int = 323  # end of the 24 layers
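For context, here is a minimal usage sketch of the changed function after this diff. The import path and the Hugging Face checkpoint name are assumptions for illustration (the diff only shows the `pytorch_optimizer/experimental` directory); the helper is assumed to return torch-style parameter groups.

```python
# Usage sketch, not part of the PR. Assumed import path and checkpoint name.
import torch
from transformers import AutoModel

# Hypothetical module path; the diff only confirms pytorch_optimizer/experimental.
from pytorch_optimizer.experimental.deberta_v3_lr_scheduler import deberta_v3_large_lr_scheduler

model = AutoModel.from_pretrained('microsoft/deberta-v3-large')

# With this change, the head/regressor group always receives an explicit lr
# (default 1e-4) instead of being optional and falling back to the optimizer default.
parameters = deberta_v3_large_lr_scheduler(
    model,
    head_param_start=390,
    base_lr=2e-5,
    head_lr=1e-4,
    wd=1e-2,
)

# lr=2e-5 is a fallback for any group that does not set its own 'lr'.
optimizer = torch.optim.AdamW(parameters, lr=2e-5)
```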