{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,7]],"date-time":"2026-04-07T21:52:41Z","timestamp":1775598761412,"version":"3.50.1"},"publisher-location":"New York, NY, USA","reference-count":28,"publisher":"ACM","license":[{"start":{"date-parts":[[2023,10,30]],"date-time":"2023-10-30T00:00:00Z","timestamp":1698624000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2023,10,30]]},"DOI":"10.1145\/3620678.3624793","type":"proceedings-article","created":{"date-parts":[[2023,10,31]],"date-time":"2023-10-31T13:58:07Z","timestamp":1698760687000},"page":"588-596","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":30,"title":["Sustainable Supercomputing for AI"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0009-0009-5100-5147","authenticated-orcid":false,"given":"Dan","family":"Zhao","sequence":"first","affiliation":[{"name":"Massachusetts Institute of Technology (MIT)"}]},{"ORCID":"https:\/\/orcid.org\/0009-0000-2884-9688","authenticated-orcid":false,"given":"Siddharth","family":"Samsi","sequence":"additional","affiliation":[{"name":"MIT Lincoln Laboratory"}]},{"ORCID":"https:\/\/orcid.org\/0009-0004-6477-8476","authenticated-orcid":false,"given":"Joseph","family":"McDonald","sequence":"additional","affiliation":[{"name":"MIT Lincoln Laboratory"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9778-1023","authenticated-orcid":false,"given":"Baolin","family":"Li","sequence":"additional","affiliation":[{"name":"Northeastern University"}]},{"ORCID":"https:\/\/orcid.org\/0009-0002-7684-1191","authenticated-orcid":false,"given":"David","family":"Bestor","sequence":"additional","affiliation":[{"name":"MIT Lincoln Laboratory"}]},{"ORCID":"https:\/\/orcid.org\/0009-0005-8066-5620","authenticated-orcid":false,"given":"Michael","family":"Jones","sequence":"additional","affiliation":[{"name":"MIT Lincoln Laboratory"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-7253-2458","authenticated-orcid":false,"given":"Devesh","family":"Tiwari","sequence":"additional","affiliation":[{"name":"Northeastern University"}]},{"ORCID":"https:\/\/orcid.org\/0009-0004-5782-7137","authenticated-orcid":false,"given":"Vijay","family":"Gadepally","sequence":"additional","affiliation":[{"name":"MIT Lincoln Laboratory"}]}],"member":"320","published-online":{"date-parts":[[2023,10,31]]},"reference":[{"key":"e_1_3_2_1_1_1","volume-title":"Introducing the AI Research SuperCluster --- Meta's cutting-edge AI supercomputer for AI research. Meta AI Research Blog","author":"Meta AI.","year":"2022","unstructured":"Meta AI. 2022. Introducing the AI Research SuperCluster --- Meta's cutting-edge AI supercomputer for AI research. Meta AI Research Blog (2022)."},{"key":"e_1_3_2_1_2_1","volume-title":"1st Workshop on Sustainable Computer Systems Design and Implementation (HotCarbon).","author":"Bashir Noman","year":"2022","unstructured":"Noman Bashir, David Irwin, Prashant Shenoy, and Abel Souza. 2022. Sustainable Computing -- Without the Hot Air. In 1st Workshop on Sustainable Computer Systems Design and Implementation (HotCarbon)."},{"key":"e_1_3_2_1_3_1","volume-title":"1st Workshop on Sustainable Computer Systems Design and Implementation (HotCarbon).","author":"Chien Andrew A.","year":"2022","unstructured":"Andrew A. Chien, Chaojie Zhang, Liuzixuan Lin, and Varsha Rao. 2022. Beyond PUE: Flexible Datacenters Empowering the Cloud to Decarbonize. In 1st Workshop on Sustainable Computer Systems Design and Implementation (HotCarbon)."},{"key":"e_1_3_2_1_4_1","doi-asserted-by":"publisher","DOI":"10.1038\/s42256-021-00356-5"},{"key":"e_1_3_2_1_5_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/N19-1423"},{"key":"e_1_3_2_1_6_1","doi-asserted-by":"publisher","DOI":"10.1109\/HPEC55821.2022.9926375"},{"key":"e_1_3_2_1_7_1","doi-asserted-by":"publisher","DOI":"10.1002\/cpe.4485"},{"key":"e_1_3_2_1_8_1","unstructured":"Miguel Hern\u00e1n and James Robins. 2020. In Causal Inference: What If."},{"key":"e_1_3_2_1_9_1","volume-title":"Distilling the Knowledge in a Neural Network. stat 1050","author":"Hinton Geoffrey","year":"2015","unstructured":"Geoffrey Hinton, Oriol Vinyals, and Jeff Dean. 2015. Distilling the Knowledge in a Neural Network. stat 1050 (2015), 9."},{"key":"e_1_3_2_1_10_1","article-title":"Sparsity in Deep Learning: Pruning and Growth for Efficient Inference and Training in Neural Networks","volume":"22","author":"Hoefler Torsten","year":"2021","unstructured":"Torsten Hoefler, Dan Alistarh, Tal Ben-Nun, Nikoli Dryden, and Alexandra Peste. 2021. Sparsity in Deep Learning: Pruning and Growth for Efficient Inference and Training in Neural Networks. J. Mach. Learn. Res. 22, 1, Article 241 (jan 2021), 124 pages.","journal-title":"J. Mach. Learn. Res."},{"key":"e_1_3_2_1_11_1","doi-asserted-by":"publisher","DOI":"10.1109\/SC.2005.3"},{"key":"e_1_3_2_1_12_1","volume-title":"Achieving More With Less: Optimizing Efficiency in Supercomputing","author":"Johnson-Groh Mara","year":"2023","unstructured":"Mara Johnson-Groh. 2023. Achieving More With Less: Optimizing Efficiency in Supercomputing. Flatiron Institute, Flatiron Scientist Spotlight (2023)."},{"key":"e_1_3_2_1_13_1","doi-asserted-by":"crossref","unstructured":"A. Krzywaniak P. Czarnul and J. Proficz. 2022. GPU Power Capping for Energy-Performance Trade-Offs in Training of Deep Convolutional Neural Networks for Image Recognition. ICCS (2022).","DOI":"10.1007\/978-3-031-08751-6_48"},{"key":"e_1_3_2_1_14_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.future.2023.03.041"},{"key":"e_1_3_2_1_15_1","doi-asserted-by":"publisher","DOI":"10.1109\/IPDPSW.2012.117"},{"key":"e_1_3_2_1_16_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.findings-naacl.151"},{"key":"e_1_3_2_1_17_1","doi-asserted-by":"publisher","DOI":"10.1145\/3458817.3476209"},{"key":"e_1_3_2_1_18_1","volume-title":"Wortman Vaughan (Eds.)","volume":"34","author":"Paul Mansheej","year":"2021","unstructured":"Mansheej Paul, Surya Ganguli, and Gintare Karolina Dziugaite. 2021. Deep Learning on a Data Diet: Finding Important Examples Early in Training. In Advances in Neural Information Processing Systems, M. Ranzato, A. Beygelzimer, Y. Dauphin, P.S. Liang, and J. Wortman Vaughan (Eds.), Vol. 34. Curran Associates, Inc., 20596--20607. https:\/\/proceedings.neurips.cc\/paper_files\/paper\/2021\/file\/ac56f8fe9eea3e4a365f29f0f1957c55-Paper.pdf"},{"key":"e_1_3_2_1_19_1","volume-title":"Interactive supercomputing on 40,000 cores for machine learning and data analysis. In 2018 IEEE High Performance extreme Computing Conference (HPEC)","author":"Reuther Albert","unstructured":"Albert Reuther, Jeremy Kepner, Chansup Byun, Siddharth Samsi, William Arcand, David Bestor, Bill Bergeron, Vijay Gadepally, Michael Houle, Matthew Hubbell, Michael Jones, Anna Klein, Lauren Milechin, Julia Mullen, Andrew Prout, Antonio Rosa, Charles Yee, and Peter Michaleas. 2018. Interactive supercomputing on 40,000 cores for machine learning and data analysis. In 2018 IEEE High Performance extreme Computing Conference (HPEC). IEEE, 1--6."},{"key":"e_1_3_2_1_20_1","unstructured":"Mohammad Shoeybi Mostofa Patwary Raul Puri Patrick LeGresley Jared Casper and Bryan Catanzaro. 2020. Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism. arXiv:1909.08053 [cs.CL]"},{"key":"e_1_3_2_1_21_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1355"},{"key":"e_1_3_2_1_22_1","doi-asserted-by":"publisher","DOI":"10.1109\/DSN.2016.36"},{"key":"e_1_3_2_1_23_1","volume-title":"Retrieved","author":"Touvron Hugo","year":"2023","unstructured":"Hugo Touvron and Thomas Scialom et. al. 2023. Llama 2: Open Foundation and Fine-Tuned Chat Models. Retrieved September 24, 2023 from https:\/\/ai.meta.com\/research\/publications\/llama-2-open-foundation-and-fine-tuned-chat-models"},{"key":"e_1_3_2_1_24_1","unstructured":"Hugo Touvron Thibaut Lavril Gautier Izacard Xavier Martinet Marie-Anne Lachaux Timoth\u00e9e Lacroix Baptiste Rozi\u00e8re Naman Goyal Eric Hambro Faisal Azhar Aurelien Rodriguez Armand Joulin Edouard Grave and Guillaume Lample. 2023. LLaMA: Open and Efficient Foundation Language Models. arXiv:2302.13971 [cs.CL]"},{"key":"e_1_3_2_1_25_1","doi-asserted-by":"publisher","DOI":"10.1093\/biomet\/34.1-2.28"},{"key":"e_1_3_2_1_26_1","volume-title":"Workshop.","year":"2023","unstructured":"BigScience Workshop. 2023. BLOOM: A 176B-Parameter Open-Access Multilingual Language Model. arXiv:2211.05100 [cs.CL]"},{"key":"e_1_3_2_1_27_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.acl-long.107"},{"key":"e_1_3_2_1_28_1","doi-asserted-by":"publisher","DOI":"10.1109\/IPDPSW55747.2022.00126"}],"event":{"name":"SoCC '23: ACM Symposium on Cloud Computing","location":"Santa Cruz CA USA","acronym":"SoCC '23","sponsor":["SIGMOD ACM Special Interest Group on Management of Data","SIGOPS ACM Special Interest Group on Operating Systems"]},"container-title":["Proceedings of the 2023 ACM Symposium on Cloud Computing"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3620678.3624793","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3620678.3624793","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,8,22]],"date-time":"2025-08-22T15:53:22Z","timestamp":1755878002000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3620678.3624793"}},"subtitle":["GPU Power Capping at HPC Scale"],"short-title":[],"issued":{"date-parts":[[2023,10,30]]},"references-count":28,"alternative-id":["10.1145\/3620678.3624793","10.1145\/3620678"],"URL":"https:\/\/doi.org\/10.1145\/3620678.3624793","relation":{},"subject":[],"published":{"date-parts":[[2023,10,30]]},"assertion":[{"value":"2023-10-31","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}