1 parent cda776d · commit 93ab9b8
pytorch_optimizer/optimizer/fp16.py
@@ -90,7 +90,7 @@ def decrease_loss_scale(self):
         self.loss_scale = max(self.loss_scale, self.threshold)
 
 
-class SafeFP16Optimizer(Optimizer):
+class SafeFP16Optimizer(Optimizer):  # pragma: no cover
     r"""Safe FP16 Optimizer.
 
     :param optimizer: OPTIMIZER.
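
For context, `# pragma: no cover` is coverage.py's standard exclusion marker; when it appears on a `class` line, coverage.py excludes the entire class body from coverage reports, so the wrapper no longer counts against the measured coverage of `fp16.py`. Below is a minimal usage sketch of the wrapper itself, assuming a fairseq-style fp16 optimizer API (`backward(loss)` applies the loss scale before the backward pass; `step()` unscales gradients and lets the dynamic loss scaler react to overflow, as suggested by the `decrease_loss_scale` context in the hunk above). The model, data, and method calls here are illustrative assumptions, not confirmed by this commit:

```python
import torch

from pytorch_optimizer.optimizer.fp16 import SafeFP16Optimizer

# fp16 kernels are primarily a GPU feature; fall back to CPU only on recent
# PyTorch builds that support half-precision matmul on CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Illustrative half-precision model; the wrapped optimizer can be any
# standard torch.optim optimizer built over the fp16 parameters.
model = torch.nn.Linear(8, 1).half().to(device)
optimizer = SafeFP16Optimizer(torch.optim.SGD(model.parameters(), lr=1e-3))

x = torch.randn(4, 8, device=device).half()
loss = model(x).float().pow(2).mean()

optimizer.backward(loss)  # scales the loss before backward (assumed fairseq-style API)
optimizer.step()          # unscales grads; an overflow shrinks the loss scale instead
optimizer.zero_grad()
```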