@@ -10,18 +10,18 @@ homepage = "https://github.com/kozistr/pytorch_optimizer"
 repository = "https://github.com/kozistr/pytorch_optimizer"
 documentation = "https://pytorch-optimizers.readthedocs.io/en/latest"
 keywords = [
-    "pytorch", "deep-learning", "optimizer", "lr scheduler", "A2Grad", "ASGD", "AccSGD", "AdaBelief", "AdaBound",
-    "AdaDelta", "AdaFactor", "AdaGC", "AdaMax", "AdamG", "AdaMod", "AdaNorm", "AdaPNM", "AdaSmooth", "AdEMAMix",
-    "Simplified-AdEMAMix", "ADOPT", "AdaHessian", "Adai", "Adalite", "AdaLomo", "AdamMini", "AdamP", "AdamS", "Adan",
-    "AggMo", "Aida", "AliG", "Amos", "Apollo", "APOLLO", "AvaGrad", "bSAM", "CAME", "DAdaptAdaGrad", "DAdaptAdam",
-    "DAdaptAdan", "DAdaptSGD", "DAdaptLion", "DeMo", "DiffGrad", "EXAdam", "FAdam", "FOCUS", "Fromage", "FTRL",
-    "GaLore", "Grams", "Gravity", "GrokFast", "GSAM", "Kate", "Lamb", "LaProp", "LARS", "Lion", "LOMO", "Lookahead",
-    "MADGRAD", "MARS", "MSVAG", "Muno", "Nero", "NovoGrad", "OrthoGrad", "PAdam", "PCGrad", "PID", "PNM", "Prodigy",
-    "PSGD", "QHAdam", "QHM", "RAdam", "Ranger", "Ranger21", "RotoGrad", "SAM", "GCSAM", "LookSAM", "ScheduleFreeSGD",
-    "ScheduleFreeAdamW", "ScheduleFreeRAdam", "SCION", "SGDP", "Shampoo", "ScalableShampoo", "SGDW", "SignSGD", "SM3",
-    "SOAP", "SopihaH", "SPAM", "StableSPAM", "SRMM", "StableAdamW", "SWATS", "TAM", "Tiger", "TRAC", "WSAM", "Yogi",
-    "BCE", "BCEFocal", "Focal", "FocalCosine", "SoftF1", "Dice", "LDAM", "Jaccard", "Bi-Tempered", "Tversky",
-    "FocalTversky", "LovaszHinge", "bitsandbytes", "WSD", "QGaLore",
+    "pytorch", "deep-learning", "optimizer", "lr scheduler", "A2Grad", "Alice", "ASGD", "AccSGD", "AdaBelief",
+    "AdaBound", "AdaDelta", "AdaFactor", "AdaGC", "AdaMax", "AdamG", "AdaMod", "AdaNorm", "AdaPNM", "AdaSmooth",
+    "AdEMAMix", "Simplified-AdEMAMix", "ADOPT", "AdaHessian", "Adai", "Adalite", "AdaLomo", "AdamMini", "AdamP",
+    "AdamS", "Adan", "AggMo", "Aida", "AliG", "Amos", "Apollo", "APOLLO", "AvaGrad", "bSAM", "CAME", "DAdaptAdaGrad",
+    "DAdaptAdam", "DAdaptAdan", "DAdaptSGD", "DAdaptLion", "DeMo", "DiffGrad", "EXAdam", "FAdam", "Fira", "FOCUS",
+    "Fromage", "FTRL", "GaLore", "Grams", "Gravity", "GrokFast", "GSAM", "Kate", "Lamb", "LaProp", "LARS", "Lion",
+    "LOMO", "Lookahead", "MADGRAD", "MARS", "MSVAG", "Muno", "Nero", "NovoGrad", "OrthoGrad", "PAdam", "PCGrad", "PID",
+    "PNM", "Prodigy", "PSGD", "QHAdam", "QHM", "RACS", "RAdam", "Ranger", "Ranger21", "RotoGrad", "SAM", "GCSAM",
+    "LookSAM", "ScheduleFreeSGD", "ScheduleFreeAdamW", "ScheduleFreeRAdam", "SCION", "SGDP", "Shampoo",
+    "ScalableShampoo", "SGDW", "SignSGD", "SM3", "SOAP", "SopihaH", "SPAM", "StableSPAM", "SRMM", "StableAdamW",
+    "SWATS", "TAM", "Tiger", "TRAC", "WSAM", "Yogi", "BCE", "BCEFocal", "Focal", "FocalCosine", "SoftF1", "Dice",
+    "LDAM", "Jaccard", "Bi-Tempered", "Tversky", "FocalTversky", "LovaszHinge", "bitsandbytes", "WSD", "QGaLore",
 ]
 classifiers = [
     "License :: OSI Approved :: Apache Software License",
0 commit comments