Skip to content

Commit 1eaff9b

Browse files
Al3xDo, awaelchli, pre-commit-ci[bot], and Borda
authored
Fix incorrect tuner error message (#16104)
Co-authored-by: Adrian Wälchli <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Jirka Borovec <[email protected]>
Co-authored-by: Jirka <[email protected]>

Fixes #15855
1 parent 1c196da commit 1eaff9b

File tree

4 files changed

+52
-6
lines changed

4 files changed

+52
-6
lines changed

src/pytorch_lightning/tuner/tuning.py

Lines changed: 14 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -296,8 +296,19 @@ def _check_tuner_configuration(
296296
" arguments should be None, please consider setting `dataloaders` instead."
297297
)
298298

299-
if any(isinstance(cb, (BatchSizeFinder, LearningRateFinder)) for cb in trainer.callbacks):
299+
configured_callbacks = []
300+
for cb in trainer.callbacks:
301+
if isinstance(cb, BatchSizeFinder) and trainer.auto_scale_batch_size:
302+
configured_callbacks.append("BatchSizeFinder")
303+
elif isinstance(cb, LearningRateFinder) and trainer.auto_lr_find:
304+
configured_callbacks.append("LearningRateFinder")
305+
if len(configured_callbacks) == 1:
300306
raise MisconfigurationException(
301-
"Trainer is already configured with a `BatchSizeFinder` callback. Please remove it if you"
302-
" want to use tuner."
307+
f"Trainer is already configured with a `{configured_callbacks[0]}` callback."
308+
"Please remove it if you want to use the Tuner."
309+
)
310+
elif len(configured_callbacks) == 2:
311+
raise MisconfigurationException(
312+
"Trainer is already configured with `LearningRateFinder` and `BatchSizeFinder` callbacks."
313+
" Please remove them if you want to use the Tuner."
303314
)

tests/tests_pytorch/tuner/test_lr_finder.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -435,10 +435,10 @@ def __init__(self):
435435
def test_if_lr_finder_callback_already_configured():
436436
"""Test that an error is raised if `LearningRateFinder` is already configured inside `Tuner`"""
437437
cb = LearningRateFinder()
438-
trainer = Trainer(auto_scale_batch_size=True, callbacks=cb)
438+
trainer = Trainer(auto_lr_find=True, callbacks=cb)
439439
model = BoringModel()
440440

441-
with pytest.raises(MisconfigurationException, match="Trainer is already configured with a .* callback"):
441+
with pytest.raises(MisconfigurationException, match="Trainer is already configured with a `LearningRateFinder`"):
442442
trainer.tune(model)
443443

444444

tests/tests_pytorch/tuner/test_scale_batch_size.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -393,7 +393,7 @@ def test_if_batch_size_finder_callback_already_configured():
393393
trainer = Trainer(auto_scale_batch_size=True, callbacks=cb)
394394
model = BoringModel()
395395

396-
with pytest.raises(MisconfigurationException, match="Trainer is already configured with a .* callback"):
396+
with pytest.raises(MisconfigurationException, match="Trainer is already configured with a `BatchSizeFinder`"):
397397
trainer.tune(model)
398398

399399

tests/tests_pytorch/tuner/test_tuning.py

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
import pytest
1515

1616
from pytorch_lightning import Trainer
17+
from pytorch_lightning.callbacks import BatchSizeFinder, LearningRateFinder
1718
from pytorch_lightning.demos.boring_classes import BoringModel
1819
from pytorch_lightning.utilities.exceptions import MisconfigurationException
1920

@@ -25,3 +26,37 @@ def test_tuner_with_distributed_strategies():
2526

2627
with pytest.raises(MisconfigurationException, match=r"not supported with `Trainer\(strategy='ddp'\)`"):
2728
trainer.tune(model)
29+
30+
31+
def test_tuner_with_already_configured_batch_size_finder():
32+
"""Test that an error is raised when tuner is already configured with BatchSizeFinder."""
33+
trainer = Trainer(auto_scale_batch_size=True, callbacks=[BatchSizeFinder()])
34+
model = BoringModel()
35+
36+
with pytest.raises(MisconfigurationException, match=r"Trainer is already configured with a `BatchSizeFinder`"):
37+
trainer.tune(model)
38+
39+
40+
def test_tuner_with_already_configured_learning_rate_finder():
41+
"""Test that an error is raised when tuner is already configured with LearningRateFinder."""
42+
trainer = trainer = Trainer(auto_lr_find=True, callbacks=[LearningRateFinder()])
43+
model = BoringModel()
44+
45+
with pytest.raises(MisconfigurationException, match=r"Trainer is already configured with a `LearningRateFinder`"):
46+
trainer.tune(model)
47+
48+
49+
def test_tuner_with_already_configured_learning_rate_finder_and_batch_size_finder():
50+
"""Test that an error is raised when tuner are already configured with LearningRateFinder and
51+
BatchSizeFinder."""
52+
trainer = trainer = Trainer(
53+
auto_lr_find=True, auto_scale_batch_size=True, callbacks=[LearningRateFinder(), BatchSizeFinder()]
54+
)
55+
model = BoringModel()
56+
57+
with pytest.raises(
58+
MisconfigurationException,
59+
match=r"Trainer is already configured with `LearningRateFinder` and "
60+
r"`BatchSizeFinder` callbacks. Please remove them if you want to use the Tuner.",
61+
):
62+
trainer.tune(model)

0 commit comments

Comments (0)