Commit 0b0bd91

Xerxes-2, Tps-F, and fumiama authored

Add timestamp and elapsed time for epoch (#273)

* add timestamp and epoch elapsed time
* don't need a class
* Revert "add timestamp and epoch elapsed time"
  This reverts commit 93b8d4a.
* adjust class def
* delete duplicate import

---------

Co-authored-by: Ftps <[email protected]>
Co-authored-by: 源文雨 <[email protected]>

1 parent 8a9909b commit 0b0bd91

File tree

1 file changed: +15, -1 lines changed

train_nsf_sim_cache_sid_load_pretrain.py

Lines changed: 15 additions & 1 deletion
@@ -4,6 +4,7 @@
 sys.path.append(os.path.join(now_dir))
 sys.path.append(os.path.join(now_dir, "train"))
 import utils
+import datetime
 
 hps = utils.get_hparams()
 os.environ["CUDA_VISIBLE_DEVICES"] = hps.gpus.replace("-", ",")
@@ -50,6 +51,18 @@
 
 global_step = 0
 
+class EpochRecorder:
+    def __init__(self):
+        self.last_time = ttime()
+
+
+    def record(self):
+        now_time = ttime()
+        elapsed_time = now_time - self.last_time
+        self.last_time = now_time
+        elapsed_time_str = str(datetime.timedelta(seconds=elapsed_time))
+        current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        return f"[{current_time}] | ({elapsed_time_str})"
 
 def main():
     # n_gpus = torch.cuda.device_count()
@@ -323,6 +336,7 @@ def train_and_evaluate(
     data_iterator = enumerate(train_loader)
 
     # Run steps
+    epoch_recorder = EpochRecorder()
     for batch_idx, info in data_iterator:
         # Data
         ## Unpack
@@ -542,7 +556,7 @@ def train_and_evaluate(
         )
 
     if rank == 0:
-        logger.info("====> Epoch: {}".format(epoch))
+        logger.info("====> Epoch: {} {}".format(epoch, epoch_recorder.record()))
     if epoch >= hps.total_epoch and rank == 0:
         logger.info("Training is done. The program is closed.")
 
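For reference, a minimal standalone sketch of the timing pattern this diff introduces. It assumes `ttime` is an alias for `time.time` (imported elsewhere in the training script), and the `sleep` call and epoch number below are placeholders used only to show the log format that `record()` produces; this is an illustration, not the file itself.

```python
import datetime
import time


class EpochRecorder:
    """Tracks wall-clock time between successive record() calls."""

    def __init__(self):
        # The training script uses ttime(), assumed here to be time.time.
        self.last_time = time.time()

    def record(self):
        now_time = time.time()
        elapsed_time = now_time - self.last_time
        self.last_time = now_time
        elapsed_time_str = str(datetime.timedelta(seconds=elapsed_time))
        current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        return f"[{current_time}] | ({elapsed_time_str})"


if __name__ == "__main__":
    epoch_recorder = EpochRecorder()
    time.sleep(1.5)  # stand-in for one training epoch
    # Mirrors the updated log line: "====> Epoch: {} {}"
    print("====> Epoch: {} {}".format(1, epoch_recorder.record()))
    # e.g. ====> Epoch: 1 [2023-04-30 12:00:00] | (0:00:01.500123)
```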

Comments (0)