@@ -11,17 +11,17 @@ repository = "https://github.com/kozistr/pytorch_optimizer"
 documentation = "https://pytorch-optimizers.readthedocs.io/en/latest"
 keywords = [
     "pytorch", "deep-learning", "optimizer", "lr scheduler", "A2Grad", "ASGD", "AccSGD", "AdaBelief", "AdaBound",
14- " AdaDelta" , " AdaFactor" , " AdaMax" , " AdamG" , " AdaMod" , " AdaNorm" , " AdaPNM" , " AdaSmooth" , " AdEMAMix" , " ADOPT " ,
15- " AdaHessian " , " Adai " , " Adalite " , " AdaLomo " , " AdamMini " , " AdamP " , " AdamS " , " Adan " , " AggMo " , " Aida " , " AliG " , " Amos " ,
16- " Apollo " , " APOLLO " , " AvaGrad " , " bSAM " , " CAME " , " DAdaptAdaGrad " , " DAdaptAdam " , " DAdaptAdan " , " DAdaptSGD " ,
17- " DAdaptLion " , " DeMo " , " DiffGrad " , " EXAdam " , " FAdam " , " FOCUS " , " Fromage " , " FTRL " , " GaLore " , " Grams " , " Gravity " ,
18- " GrokFast " , " GSAM " , " Kate " , " Lamb " , " LaProp " , " LARS " , " Lion " , " LOMO " , " Lookahead " , " MADGRAD " , " MARS " , " MSVAG " ,
19- " Muno " , " Nero " , " NovoGrad " , " OrthoGrad " , " PAdam " , " PCGrad " , " PID " , " PNM " , " Prodigy " , " PSGD " , " QHAdam " , " QHM " ,
20- " RAdam" , " Ranger" , " Ranger21" , " RotoGrad" , " SAM" , " GCSAM" , " LookSAM" , " ScheduleFreeSGD" , " ScheduleFreeAdamW " ,
21- " ScheduleFreeRAdam" , " SCION" , " SGDP" , " Shampoo" , " ScalableShampoo" , " SGDW" , " SignSGD" , " SM3" , " SOAP " , " SopihaH " ,
22- " SPAM" , " StableSPAM" , " SRMM" , " StableAdamW" , " SWATS" , " TAM" , " Tiger" , " TRAC" , " WSAM" , " Yogi" , " BCE " , " BCEFocal " ,
23- " Focal" , " FocalCosine" , " SoftF1" , " Dice" , " LDAM" , " Jaccard" , " Bi-Tempered" , " Tversky" , " FocalTversky " ,
24- " LovaszHinge" , " bitsandbytes" , " WSD" , " QGaLore" ,
14+ " AdaDelta" , " AdaFactor" , " AdaGC " , " AdaMax" , " AdamG" , " AdaMod" , " AdaNorm" , " AdaPNM" , " AdaSmooth" , " AdEMAMix" ,
15+ " Simplified-AdEMAMix " , " ADOPT " , " AdaHessian " , " Adai " , " Adalite " , " AdaLomo " , " AdamMini " , " AdamP " , " AdamS " , " Adan " ,
16+ " AggMo " , " Aida " , " AliG " , " Amos " , " Apollo " , " APOLLO " , " AvaGrad " , " bSAM " , " CAME " , " DAdaptAdaGrad " , " DAdaptAdam " ,
17+ " DAdaptAdan " , " DAdaptSGD " , " DAdaptLion " , " DeMo " , " DiffGrad " , " EXAdam " , " FAdam " , " FOCUS " , " Fromage " , " FTRL " ,
18+ " GaLore " , " Grams " , " Gravity " , " GrokFast " , " GSAM " , " Kate " , " Lamb " , " LaProp " , " LARS " , " Lion " , " LOMO " , " Lookahead " ,
19+ " MADGRAD " , " MARS " , " MSVAG " , " Muno " , " Nero " , " NovoGrad " , " OrthoGrad " , " PAdam " , " PCGrad " , " PID " , " PNM " , " Prodigy " ,
20+ " PSGD " , " QHAdam " , " QHM " , " RAdam" , " Ranger" , " Ranger21" , " RotoGrad" , " SAM" , " GCSAM" , " LookSAM" , " ScheduleFreeSGD" ,
21+ " ScheduleFreeAdamW " , " ScheduleFreeRAdam" , " SCION" , " SGDP" , " Shampoo" , " ScalableShampoo" , " SGDW" , " SignSGD" , " SM3" ,
22+ " SOAP " , " SopihaH " , " SPAM" , " StableSPAM" , " SRMM" , " StableAdamW" , " SWATS" , " TAM" , " Tiger" , " TRAC" , " WSAM" , " Yogi" ,
23+ " BCE " , " BCEFocal " , " Focal" , " FocalCosine" , " SoftF1" , " Dice" , " LDAM" , " Jaccard" , " Bi-Tempered" , " Tversky" ,
24+ " FocalTversky " , " LovaszHinge" , " bitsandbytes" , " WSD" , " QGaLore" ,
 ]
 classifiers = [
     "License :: OSI Approved :: Apache Software License",