1 parent c8cae85 commit fdafa34
tests/tests_pytorch/tuner/test_lr_finder.py
@@ -599,7 +599,9 @@ def configure_optimizers(self):
 
     # Verify learning rate finder ran and has results
     assert lr_finder_callback.optimal_lr is not None, "Learning rate finder should have results"
-    assert lr_finder_callback.optimal_lr.suggestion() > 0, "Learning rate suggestion should be positive"
+    suggestion = lr_finder_callback.optimal_lr.suggestion()
+    if suggestion is not None:
+        assert suggestion > 0, "Learning rate suggestion should be positive"
 
 
 def test_gradient_correctness():
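
For context on why the test now guards the value: the `LearningRateFinder` callback stores its result on `optimal_lr`, and `suggestion()` on that result can return `None` when no usable learning rate can be derived from the loss curve, so callers should check for `None` before comparing. Below is a minimal, self-contained sketch of the same pattern outside the test suite, assuming the `lightning.pytorch` import path and a hypothetical `TinyModel` module; it illustrates how `Tuner.lr_find` and a possibly-`None` `suggestion()` are typically handled, and is not code from this commit.

import torch
from torch.utils.data import DataLoader, TensorDataset
from lightning.pytorch import LightningModule, Trainer
from lightning.pytorch.tuner import Tuner


class TinyModel(LightningModule):
    # Hypothetical minimal module; the LR finder tunes the `learning_rate` attribute.
    def __init__(self, learning_rate: float = 1e-3):
        super().__init__()
        self.learning_rate = learning_rate
        self.layer = torch.nn.Linear(32, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.mse_loss(self.layer(x), y)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=self.learning_rate)


if __name__ == "__main__":
    loader = DataLoader(TensorDataset(torch.randn(256, 32), torch.randn(256, 1)), batch_size=32)
    model = TinyModel()
    trainer = Trainer(max_epochs=1, logger=False, enable_checkpointing=False)

    # Run the LR finder; lr_find may return None in some launch configurations,
    # and suggestion() may return None when no suitable learning rate is found.
    lr_finder = Tuner(trainer).lr_find(model, train_dataloaders=loader)
    suggestion = lr_finder.suggestion() if lr_finder is not None else None
    if suggestion is not None:
        model.learning_rate = suggestion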