 
 #::: } :::#
 #::: if (it.save_training || it.save_evaluation) { :::#
-from ignite.handlers import (
-    Checkpoint,
-    DiskSaver,
-    global_step_from_engine,
-)  # usort: skip
+from ignite.handlers import Checkpoint, DiskSaver, global_step_from_engine  # usort: skip
 
 #::: } else { :::#
 from ignite.handlers import Checkpoint
@@ -84,9 +80,7 @@ def log_metrics(engine: Engine, tag: str) -> None:
     tag
         a string to add at the start of output.
     """
-    metrics_format = "{0} [{1}/{2}]: {3}".format(
-        tag, engine.state.epoch, engine.state.iteration, engine.state.metrics
-    )
+    metrics_format = "{0} [{1}/{2}]: {3}".format(tag, engine.state.epoch, engine.state.iteration, engine.state.metrics)
     engine.logger.info(metrics_format)
 
 
@@ -175,21 +169,13 @@ def setup_exp_logging(config, trainer, optimizers, evaluators):
     """Setup Experiment Tracking logger from Ignite."""
 
     #::: if (it.logger === 'clearml') { :::#
-    logger = common.setup_clearml_logging(
-        trainer, optimizers, evaluators, config.log_every_iters
-    )
+    logger = common.setup_clearml_logging(trainer, optimizers, evaluators, config.log_every_iters)
     #::: } else if (it.logger === 'mlflow') { :::#
-    logger = common.setup_mlflow_logging(
-        trainer, optimizers, evaluators, config.log_every_iters
-    )
+    logger = common.setup_mlflow_logging(trainer, optimizers, evaluators, config.log_every_iters)
     #::: } else if (it.logger === 'neptune') { :::#
-    logger = common.setup_neptune_logging(
-        trainer, optimizers, evaluators, config.log_every_iters
-    )
+    logger = common.setup_neptune_logging(trainer, optimizers, evaluators, config.log_every_iters)
     #::: } else if (it.logger === 'polyaxon') { :::#
-    logger = common.setup_plx_logging(
-        trainer, optimizers, evaluators, config.log_every_iters
-    )
+    logger = common.setup_plx_logging(trainer, optimizers, evaluators, config.log_every_iters)
     #::: } else if (it.logger === 'tensorboard') { :::#
     logger = common.setup_tb_logging(
         config.output_dir,
@@ -199,13 +185,9 @@ def setup_exp_logging(config, trainer, optimizers, evaluators):
         config.log_every_iters,
     )
     #::: } else if (it.logger === 'visdom') { :::#
-    logger = common.setup_visdom_logging(
-        trainer, optimizers, evaluators, config.log_every_iters
-    )
+    logger = common.setup_visdom_logging(trainer, optimizers, evaluators, config.log_every_iters)
     #::: } else if (it.logger === 'wandb') { :::#
-    logger = common.setup_wandb_logging(
-        trainer, optimizers, evaluators, config.log_every_iters
-    )
+    logger = common.setup_wandb_logging(trainer, optimizers, evaluators, config.log_every_iters)
     #::: } :::#
     return logger
 
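For context, a minimal, hypothetical usage sketch of the reformatted setup_exp_logging (not part of this commit). It assumes a config namespace carrying output_dir and log_every_iters plus stand-in trainer/evaluator engines; the concrete logger returned depends on which it.logger branch the template rendered:

from argparse import Namespace

from ignite.engine import Engine

# Assumed config fields; real generated projects build this from CLI/YAML.
config = Namespace(output_dir="./logs", log_every_iters=100)

trainer = Engine(lambda engine, batch: None)    # stand-in training step
evaluator = Engine(lambda engine, batch: None)  # stand-in evaluation step

# optimizers/evaluators may be None or a single engine; the common.setup_*_logging
# helpers accept both.
logger = setup_exp_logging(config, trainer, optimizers=None, evaluators=evaluator)
trainer.run([0] * 10, max_epochs=2)
logger.close()  # flush and release the experiment-tracking backend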