Detected call of lr_scheduler.step() before optimizer.step() #13300

digital-idiot asked this question in Lightning Trainer API: Trainer, LightningModule, LightningDataModule
When tuning the learning_rate with the helper below, I get the warning in the title. Related issue: #5587.

```python
from typing import Any, Dict

from pytorch_lightning import LightningModule, Trainer
from pytorch_lightning.callbacks import EarlyStopping, StochasticWeightAveraging


def tune_lr(
    model: LightningModule,
    tuning_params: Dict[str, Any] = None,
    trainer_args: Dict[str, Any] = None,
):
    # Run lr_find on a throwaway Trainer and return the suggested learning rate.
    if trainer_args is None:
        trainer_args = dict()
    if tuning_params is None:
        tuning_params = dict()
    dummy_trainer = Trainer(**trainer_args)
    lr_finder = dummy_trainer.tuner.lr_find(model=model, **tuning_params)
    lr = lr_finder.suggestion()
    del dummy_trainer
    return lr


net.hparams.lr = tune_lr(
    model=net,
    tuning_params={
        "mode": "exponential",
        "datamodule": data_module,
        "min_lr": 1e-08,
        "max_lr": 1.0,
    },
    trainer_args={
        "callbacks": [
            StochasticWeightAveraging(swa_lrs=1e-2),
            EarlyStopping(
                monitor="Validation-Mean_Loss",
                mode="min",
                patience=10,
                strict=True,
                check_finite=True,
                min_delta=1e-3,
                check_on_train_epoch_end=False,
            ),
        ],
        "accumulate_grad_batches": 1,
        "check_val_every_n_epoch": 10,
        "num_sanity_val_steps": 0,
        "detect_anomaly": False,
        "log_every_n_steps": 1,
        "enable_progress_bar": True,
        "precision": 16,
        "sync_batchnorm": False,
        "enable_model_summary": False,
        "max_epochs": max_epochs,
        "accelerator": "gpu",
        "devices": -1,
    },
)
```
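For context, a minimal sketch (an assumption, not part of the original post) of how the value written back via `net.hparams.lr` would actually take effect: `lr_find` and the manual assignment above only matter if the model reads that attribute in `configure_optimizers`, roughly like so:

```python
import torch
from pytorch_lightning import LightningModule


class Net(LightningModule):
    def __init__(self, lr: float = 1e-3):
        super().__init__()
        # Populates self.hparams, which tune_lr() above overwrites.
        self.save_hyperparameters()

    def configure_optimizers(self):
        # Picks up the learning rate suggested by lr_find.
        return torch.optim.Adam(self.parameters(), lr=self.hparams.lr)
```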
Answered by akihironitta (Jun 15, 2022)
Replies: 1 comment · 3 replies
@digital-idiot Can I see the trainer args as well?
Answer selected by digital-idiot
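(Aside, an assumption not spelled out in the visible thread: with `precision=16`, one common and benign trigger for this exact PyTorch warning is the AMP `GradScaler` skipping the first few `optimizer.step()` calls while it calibrates its loss scale, so the LR scheduler ends up stepping first. A standalone sketch of the mechanism, independent of Lightning:)

```python
import torch

model = torch.nn.Linear(2, 2)
opt = torch.optim.SGD(model.parameters(), lr=0.1)
sched = torch.optim.lr_scheduler.StepLR(opt, step_size=1)
scaler = torch.cuda.amp.GradScaler()  # used under the hood when precision=16

loss = model(torch.randn(4, 2)).sum()
scaler.scale(loss).backward()
scaler.step(opt)   # silently skipped if the scaled grads contain inf/NaN
scaler.update()
sched.step()       # if opt.step() was skipped, PyTorch emits this warning
```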