Skip to content

Commit fdb5245

Browse files
authored
Add cross-layer energy paper (#270)
* Add cross-layer energy paper * Add braces to AI
1 parent d65df6b commit fdb5245

File tree

3 files changed

+21
-0
lines changed

3 files changed

+21
-0
lines changed

source/_data/SymbioticLab.bib

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1739,6 +1739,20 @@ @Article{crosslayer-energy:arxiv24
17391739
}
17401740
}
17411741
@inproceedings{crosslayer-energy:eecs24,
  author    = {Jae-Won Chung and Nishil Talati and Mosharaf Chowdhury},
  booktitle = {Energy-Efficient Computing for Science Workshop},
  title     = {Toward Cross-Layer Energy Optimizations in {AI} Systems},
  year      = {2024},

  publist_confkey = {EECS'24},
  publist_link = {paper || crosslayer-energy-eecs24.pdf},
  publist_topic = {Systems + AI},
  publist_topic = {Energy-Efficient Systems},
  publist_abstract = {The ``AI for Science, Energy, and Security'' report from DOE outlines a significant focus on developing and optimizing artificial intelligence workflows for a foundational impact on a broad range of DOE missions. With the pervasive usage of artificial intelligence (AI) and machine learning (ML) tools and techniques, their energy efficiency is likely to become the gating factor toward adoption. This is because generative AI (GenAI) models are massive energy hogs: for instance, training a 200-billion parameter large language model (LLM) at Amazon is estimated to have taken 11.9 GWh, which is enough to power more than a thousand average U.S. households for a year. Inference consumes even more energy, because a model trained once serve millions. Given this scale, high energy efficiency is key to addressing the power delivery problem of constructing and operating new supercomputers and datacenters specialized for AI workloads. In that regard, we outline software- and architecture-level research challenges and opportunities, setting the stage for creating cross-layer energy optimizations in AI systems.
  }
}

17421756
@Article{fedtrans:arxiv24,
17431757
author = {Yuxuan Zhu and Jiachen Liu and Mosharaf Chowdhury and Fan Lai},
17441758
title = {{FedTrans}: Efficient Federated Learning Over Heterogeneous Clients via Model Transformation},
Binary file not shown.

source/publications/index.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -324,6 +324,13 @@ venues:
324324
name: 1st Workshop on Sustainable Computer Systems Design and Implementation
325325
date: 2022-07-10
326326
url: https://hotcarbon.org/
  EECS:
    category: Workshops
    occurrences:
      - key: EECS'24
        name: Energy-Efficient Computing for Science Workshop
        date: 2024-09-09
        url: https://web.cvent.com/event/a3dd901a-699e-408c-8a84-81445e6ea64f
327334
'USENIX ;login:':
328335
category: Journals
329336
occurrences:

0 commit comments

Comments
 (0)