Skip to content

Commit e1fd2d2

Browse files
committed
Minor tweaks
Signed-off-by: Olatunji Ruwase <olruwase@microsoft.com>
1 parent 54c2564 commit e1fd2d2

File tree

2 files changed

+1
-3
lines changed

2 files changed

+1
-3
lines changed

megatron/training.py

Lines changed: 1 addition & 2 deletions
```diff
@@ -1185,9 +1185,8 @@ def training_log(loss_dict, total_loss_dict, learning_rate, iteration,
         log_string += ' samples per second: {:.3f} |'.format(samples_per_sec)
         log_string += ' tokens per gpu per second (tgs): {:.3f} |'.format(tokens_per_gpu_per_second)
         log_string += ' TFLOPs: {:.2f} |'.format(tflops)
-        log_string += ' \n TeraFLOPs: {:.2f} |'.format(tflops)
         log_string += ' params(B): {:.2f} |'.format(approx_parameters_in_billions)
-        log_string += ' moe params(B): {:.2f}|'.format(moe_parameters_in_billions())
+        log_string += ' moe params(B): {:.2f} |'.format(moe_parameters_in_billions())
         total_loss_dict[advanced_iters_key] = 0
         total_loss_dict[skipped_iters_key] = 0
         total_loss_dict[nan_iters_key] = 0
```

megatron/utils.py

Lines changed: 0 additions & 1 deletion
```diff
@@ -108,7 +108,6 @@ def average_losses_across_data_parallel_group(losses):


 def report_memory(name):
-    return
     """Simple GPU memory report."""
     mega_bytes = 1024.0 * 1024.0
     string = name + ' memory (MB)'
```

0 commit comments

Comments (0)