1010from omegaconf import DictConfig , OmegaConf
1111from pytorch_lightning import LightningModule , Trainer
1212from pytorch_lightning .callbacks import ModelCheckpoint
13- from pytorch_lightning .loggers import LightningLoggerBase
13+ from pytorch_lightning .loggers . logger import Logger
1414
1515from nn_core .common import PROJECT_ROOT
1616
2020_STATS_KEY : str = "stats"
2121
2222
23- class NNLogger (LightningLoggerBase ):
23+ class NNLogger (Logger ):
2424
25- __doc__ = LightningLoggerBase .__doc__
25+ __doc__ = Logger .__doc__
2626
2727 def __init__ (self , logging_cfg : DictConfig , cfg : DictConfig , resume_id : Optional [str ]):
2828 super ().__init__ ()
@@ -39,7 +39,7 @@ def __init__(self, logging_cfg: DictConfig, cfg: DictConfig, resume_id: Optional
3939 self .logging_cfg .logger .mode = "offline"
4040
4141 pylogger .info (f"Instantiating <{ self .logging_cfg .logger ['_target_' ].split ('.' )[- 1 ]} >" )
42- self .wrapped : LightningLoggerBase = hydra .utils .instantiate (
42+ self .wrapped : Logger = hydra .utils .instantiate (
4343 self .logging_cfg .logger ,
4444 version = self .resume_id ,
4545 dir = os .getenv ("WANDB_DIR" , "." ),
@@ -115,7 +115,7 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None):
115115
116116 This method logs metrics as as soon as it received them. If you want to aggregate
117117 metrics for one specific `step`, use the
118- :meth:`~pytorch_lightning.loggers.base.LightningLoggerBase .agg_and_log_metrics` method.
118+ :meth:`~pytorch_lightning.loggers.logger.Logger.agg_and_log_metrics` method.
119119
120120 Args:
121121 metrics: Dictionary with metric names as keys and measured quantities as values
0 commit comments