
Commit 973b02d

refactor: head_lr
1 parent 3499ffe

1 file changed (+2, -6)

pytorch_optimizer/experimental/deberta_v3_lr_scheduler.py

Lines changed: 2 additions & 6 deletions
@@ -9,7 +9,7 @@ def deberta_v3_large_lr_scheduler(
     model: nn.Module,
     head_param_start: int = 390,
     base_lr: float = 2e-5,
-    head_lr: Optional[float] = None,
+    head_lr: float = 1e-4,
     wd: float = 1e-2,
 ) -> PARAMETERS:
     """DeBERTa-v3 large layer-wise lr scheduler
@@ -28,11 +28,7 @@ def deberta_v3_large_lr_scheduler(
 
     regressor_group = [params for (_, params) in regressor_parameters]
 
-    parameters = []
-    if head_lr is not None:
-        parameters.append({'params': regressor_group, 'lr': head_lr})
-    else:
-        parameters.append({'params': regressor_group})
+    parameters = [{'params': regressor_group, 'lr': head_lr}]
 
     layer_low_threshold: int = 195  # start of the 12 layers
     layer_middle_threshold: int = 323  # end of the 24 layers
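
The refactor removes the None branch: head_lr is no longer Optional, so the regressor (head) parameter group always carries an explicit learning rate, defaulting to 1e-4, instead of being added without an 'lr' key. A minimal usage sketch follows; the import path is inferred from the file path above, and the backbone model and AdamW optimizer are illustrative assumptions, not part of this commit:

import torch
from transformers import AutoModel  # illustrative backbone, not from this commit

from pytorch_optimizer.experimental.deberta_v3_lr_scheduler import deberta_v3_large_lr_scheduler

# Load a DeBERTa-v3 large backbone (hypothetical choice for illustration).
model = AutoModel.from_pretrained('microsoft/deberta-v3-large')

# After this commit the head group is always {'params': regressor_group, 'lr': head_lr},
# with head_lr defaulting to 1e-4 rather than the 'lr' key being omitted when None.
parameters = deberta_v3_large_lr_scheduler(model, base_lr=2e-5, head_lr=1e-4)

# The groups are plain dicts with 'params' and 'lr' keys, so a torch optimizer accepts them.
optimizer = torch.optim.AdamW(parameters)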
