
Commit 327a52e

Add Perseus final version (#276)
1 parent 137f2db commit 327a52e

3 files changed: +26 -0 lines changed

source/_data/SymbioticLab.bib

Lines changed: 21 additions & 0 deletions
@@ -1682,6 +1682,27 @@ @Article{perseus:arxiv23
 }
 }
 
+@InProceedings{perseus:sosp24,
+  author           = {Jae-Won Chung and Yile Gu and Insu Jang and Luoxi Meng and Nikhil Bansal and Mosharaf Chowdhury},
+  booktitle        = {SOSP},
+  title            = {Reducing Energy Bloat in Large Model Training},
+  year             = {2024},
+  month            = {Nov},
+  publist_confkey  = {SOSP'24},
+  publist_link     = {paper || perseus-sosp24.pdf},
+  publist_link     = {code || https://github.com/ml-energy/zeus},
+  publist_link     = {website || https://ml.energy/zeus/research_overview/perseus},
+  publist_topic    = {Energy-Efficient Systems},
+  publist_topic    = {Systems + AI},
+  publist_badge    = {Artifacts Available},
+  publist_badge    = {Artifacts Functional},
+  publist_badge    = {Results Reproduced},
+  publist_abstract = {
+  Training large AI models on numerous GPUs consumes a massive amount of energy, making power delivery one of the largest limiting factors in building and operating datacenters for AI workloads. However, we observe that not all energy consumed during training directly contributes to end-to-end throughput; a significant portion can be removed without slowing down training. We call this portion energy bloat.
+
+  In this work, we identify two independent sources of energy bloat in large model training and propose Perseus, a training system that mitigates both. To do this, Perseus obtains the time–energy tradeoff frontier of a large model training job using an efficient graph cut-based algorithm, and schedules computation energy consumption across time to reduce both types of energy bloat. Evaluation on large models, including GPT-3 and Bloom, shows that Perseus reduces the energy consumption of large model training by up to 30% without any throughput loss or hardware modification.
+  }}
+
 @Article{llm-survey:arxiv23,
   author  = {Zhongwei Wan and Xin Wang and Che Liu and Samiul Alam and Yu Zheng and Zhongnan Qu and Shen Yan and Yi Zhu and Quanlu Zhang and Mosharaf Chowdhury and Mi Zhang},
   journal = {CoRR},
1.4 MB — Binary file not shown.

source/publications/index.md

Lines changed: 5 additions & 0 deletions
@@ -36,6 +36,11 @@ venues:
   SOSP:
     category: Conferences
     occurrences:
+      - key: SOSP'24
+        name: The 30th ACM Symposium on Operating Systems and Principles
+        date: 2024-11-04
+        url: https://sigops.org/s/conferences/sosp/2024/
+        acceptance: 17.34%
       - key: SOSP'23
         name: The 29th ACM Symposium on Operating Systems and Principles
         date: 2023-10-26
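
The publist_abstract in the new BibTeX entry above says Perseus obtains the "time–energy tradeoff frontier" of a training job. As a minimal illustrative sketch only (this is not Perseus's graph cut-based algorithm; the pareto_frontier helper and all numbers below are made up), the following Python snippet shows what such a frontier means: among candidate execution plans measured by per-iteration time and energy, keep only those that no other plan beats on both dimensions.

# Illustrative sketch only: what a "time-energy tradeoff frontier" refers to.
# NOT Perseus's actual graph cut-based algorithm; hypothetical numbers.

def pareto_frontier(plans):
    """Keep only plans that no other plan beats in both time and energy."""
    frontier = []
    for name, time_s, energy_j in plans:
        dominated = any(
            other_t <= time_s and other_e <= energy_j
            and (other_t, other_e) != (time_s, energy_j)
            for _, other_t, other_e in plans
        )
        if not dominated:
            frontier.append((name, time_s, energy_j))
    # Sort by iteration time so the tradeoff reads from fastest to cheapest.
    return sorted(frontier, key=lambda plan: plan[1])

# Hypothetical plans: (label, seconds per iteration, joules per iteration).
candidate_plans = [
    ("all GPUs at max frequency",     1.00, 5200.0),
    ("slow down non-critical stages", 1.00, 4100.0),  # same speed, less energy
    ("uniformly lower GPU frequency", 1.15, 4600.0),  # dominated, off the frontier
    ("aggressive slowdown",           1.30, 3600.0),
]

for name, t, e in pareto_frontier(candidate_plans):
    print(f"{name}: {t:.2f} s/iter, {e:.0f} J/iter")

In this toy example the max-frequency plan is dominated by a plan with the same iteration time but lower energy; that gap is the "energy bloat" the abstract describes.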
