Skip to content

Commit ad230b8

Browse files
committed
update: Kate optimizer
1 parent 3eb4bc1 commit ad230b8

File tree

3 files changed: +5 −1 lines changed

3 files changed: +5 −1 lines changed

pytorch_optimizer/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,7 @@
6666
from pytorch_optimizer.optimizer.gc import centralize_gradient
6767
from pytorch_optimizer.optimizer.gravity import Gravity
6868
from pytorch_optimizer.optimizer.grokfast import GrokFastAdamW, gradfilter_ema, gradfilter_ma
69+
from pytorch_optimizer.optimizer.kate import Kate
6970
from pytorch_optimizer.optimizer.lamb import Lamb
7071
from pytorch_optimizer.optimizer.lars import LARS
7172
from pytorch_optimizer.optimizer.lion import Lion
@@ -199,6 +200,7 @@
199200
ScheduleFreeAdamW,
200201
FAdam,
201202
GrokFastAdamW,
203+
Kate,
202204
]
203205
OPTIMIZERS: Dict[str, OPTIMIZER] = {str(optimizer.__name__).lower(): optimizer for optimizer in OPTIMIZER_LIST}
204206

tests/constants.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@
5050
GaLore,
5151
Gravity,
5252
GrokFastAdamW,
53+
Kate,
5354
Lamb,
5455
Lion,
5556
Nero,
@@ -461,6 +462,7 @@
461462
(ScheduleFreeAdamW, {'lr': 1e0, 'weight_decay': 1e-3}, 5),
462463
(FAdam, {'lr': 1e0, 'weight_decay': 1e-3}, 5),
463464
(GrokFastAdamW, {'lr': 1e0, 'weight_decay': 1e-3}, 10),
465+
(Kate, {'lr': 5e-2}, 10),
464466
]
465467
ADANORM_SUPPORTED_OPTIMIZERS: List[Tuple[Any, Dict[str, Union[float, bool, int]], int]] = [
466468
(AdaBelief, {'lr': 5e-1, 'weight_decay': 1e-3, 'adanorm': True}, 10),

tests/test_load_modules.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ def test_load_lr_scheduler_invalid(invalid_lr_scheduler_names):
3838

3939

4040
def test_get_supported_optimizers():
41-
assert len(get_supported_optimizers()) == 68
41+
assert len(get_supported_optimizers()) == 69
4242

4343

4444
def test_get_supported_lr_schedulers():

0 commit comments

Comments (0)