Skip to content

Commit 0947959

Browse files
committed
update: load_optimizers
1 parent 97bfab4 commit 0947959

File tree

1 file changed

+1
-5
lines changed

1 file changed

+1
-5
lines changed

pytorch_optimizer/optimizers.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
 from pytorch_optimizer.adamp import AdamP
 from pytorch_optimizer.diffgrad import DiffGrad
 from pytorch_optimizer.diffrgrad import DiffRGrad
-from pytorch_optimizer.fp16 import SafeFP16Optimizer
 from pytorch_optimizer.lamb import Lamb
 from pytorch_optimizer.lars import LARS
 from pytorch_optimizer.madgrad import MADGRAD
@@ -15,7 +14,7 @@
 from pytorch_optimizer.sgdp import SGDP


-def load_optimizers(optimizer: str, use_fp16: bool = False):
+def load_optimizers(optimizer: str):
     optimizer: str = optimizer.lower()

     if optimizer == 'adamp':
@@ -49,7 +48,4 @@ def load_optimizers(optimizer: str):
     else:
         raise NotImplementedError(f'[-] not implemented optimizer : {optimizer}')

-    if use_fp16:
-        opt = SafeFP16Optimizer(opt)
-
     return opt

0 commit comments

Comments
 (0)