{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,8,7]],"date-time":"2025-08-07T09:14:57Z","timestamp":1754558097050,"version":"3.41.0"},"publisher-location":"New York, NY, USA","reference-count":53,"publisher":"ACM","license":[{"start":{"date-parts":[[2022,2,22]],"date-time":"2022-02-22T00:00:00Z","timestamp":1645488000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"funder":[{"name":"Facebook SysML Research Award"},{"name":"Center for Applications Driving Architectures (ADA), one of six centers of JUMP, a Semiconductor Research Corporation program co-sponsored by DARPA"},{"name":"National Science Foundation CAREER award","award":["CCF-2045974"],"award-info":[{"award-number":["CCF-2045974"]}]}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2022,2,28]]},"DOI":"10.1145\/3503222.3507722","type":"proceedings-article","created":{"date-parts":[[2022,2,22]],"date-time":"2022-02-22T20:49:01Z","timestamp":1645562941000},"page":"655-668","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":8,"title":["ProSE: the architecture and design of a protein discovery engine"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0000-0001-7159-0974","authenticated-orcid":false,"given":"Eyes","family":"Robson","sequence":"first","affiliation":[{"name":"University of California at Berkeley, USA"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-2668-6456","authenticated-orcid":false,"given":"Ceyu","family":"Xu","sequence":"additional","affiliation":[{"name":"Duke University, USA"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-3574-3440","authenticated-orcid":false,"given":"Lisa Wu","family":"Wills","sequence":"additional","affiliation":[{"name":"Duke University, 
USA"}]}],"member":"320","published-online":{"date-parts":[[2022,2,22]]},"reference":[{"key":"e_1_3_2_1_1_1","doi-asserted-by":"publisher","DOI":"10.1038\/s41592-019-0598-1"},{"key":"e_1_3_2_1_2_1","doi-asserted-by":"publisher","DOI":"10.1145\/3316781.3322472"},{"key":"e_1_3_2_1_3_1","doi-asserted-by":"publisher","DOI":"10.1101\/2020.01.23.917682"},{"key":"e_1_3_2_1_4_1","doi-asserted-by":"publisher","DOI":"10.1371\/journal.pone.0017887"},{"key":"e_1_3_2_1_5_1","doi-asserted-by":"publisher","DOI":"10.1109\/ARITH.2019.00022"},{"key":"e_1_3_2_1_6_1","doi-asserted-by":"publisher","DOI":"10.1109\/ISCA.2016.40"},{"key":"e_1_3_2_1_7_1","doi-asserted-by":"publisher","DOI":"10.1109\/JETCAS.2019.2910232"},{"key":"e_1_3_2_1_8_1","doi-asserted-by":"publisher","unstructured":"Ratul Chowdhury Nazim Bouatta Surojit Biswas Charlotte Rochereau George M. Church Peter K. Sorger and Mohammed AlQuraishi. 2021. Single-sequence protein structure prediction using language models from deep learning. bioRxiv https:\/\/doi.org\/10.1101\/2021.08.02.454840 10.1101\/2021.08.02.454840","DOI":"10.1101\/2021.08.02.454840"},{"key":"e_1_3_2_1_9_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1"},{"key":"e_1_3_2_1_10_1","unstructured":"DeepMind. 2022. AlphaFold: Using AI for scientific discovery. https:\/\/deepmind.com\/blog\/article\/AlphaFold-Using-AI-for-scientific-discovery"},{"key":"e_1_3_2_1_11_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1"},{"key":"e_1_3_2_1_12_1","doi-asserted-by":"publisher","DOI":"10.1109\/MM.2017.37"},{"key":"e_1_3_2_1_13_1","doi-asserted-by":"publisher","unstructured":"David H. Freedman. 2020. Hunting for New Drugs with AI. 
Scientific American Digital Issue February https:\/\/doi.org\/10.1038\/d41586-019-03846-0 10.1038\/d41586-019-03846-0","DOI":"10.1038\/d41586-019-03846-0"},{"key":"e_1_3_2_1_14_1","doi-asserted-by":"publisher","DOI":"10.1145\/3037697.3037702"},{"key":"e_1_3_2_1_15_1","doi-asserted-by":"publisher","DOI":"10.1109\/MICRO50266.2020.00062"},{"key":"e_1_3_2_1_16_1","doi-asserted-by":"publisher","DOI":"10.1145\/2966986.2980098"},{"key":"e_1_3_2_1_17_1","doi-asserted-by":"publisher","DOI":"10.1002\/jcc.25522"},{"key":"e_1_3_2_1_18_1","volume-title":"Proceedings of the IEEE International Symposium on High-Performance Computer Architecture (HPCA).","author":"Ham Tae Jun","year":"2020","unstructured":"Tae Jun Ham, Sung Jun Jung, Seonghak Kim, Young H Oh, Yeonhong Park, Yoonho Song, Jung-Hun Park, Sanghee Lee, Kyoung Park, Jae W Lee, and Deog-Kyoon Jeong. 2020. A3: Accelerating Attention Mechanisms in Neural Networks with Approximation. In Proceedings of the IEEE International Symposium on High-Performance Computer Architecture (HPCA)."},{"key":"e_1_3_2_1_19_1","doi-asserted-by":"publisher","DOI":"10.1109\/ISCA52012.2021.00060"},{"key":"e_1_3_2_1_20_1","doi-asserted-by":"publisher","DOI":"10.1145\/3360307"},{"key":"e_1_3_2_1_21_1","doi-asserted-by":"publisher","DOI":"10.1145\/3079856.3080246"},{"key":"e_1_3_2_1_22_1","doi-asserted-by":"publisher","unstructured":"John Jumper Richard Evans Alexander Pritzel Tim Green Michael Figurnov Olaf Ronneberger Kathryn Tunyasuvunakool Russ Bates Augustin \u017d\u00eddek Anna Potapenko Alex Bridgland Clemens Meyer Simon A. A. Kohl Andrew J. Ballard Andrew Cowie Bernardino Romera-Paredes Stanislav Nikolov Rishub Jain Jonas Adler Trevor Back Stig Petersen David Reiman Ellen Clancy Michal Zielinski Martin Steinegger Michalina Pacholska Tamas Berghammer Sebastian Bodenstein David Silver Oriol Vinyals Andrew W. Senior Koray Kavukcuoglu Pushmeet Kohli and Demis Hassabis. 2021. Highly accurate protein structure prediction with AlphaFold. 
Nature 15 Jul issn:1476-4687 https:\/\/doi.org\/10.1038\/s41586-021-03819-2 10.1038\/s41586-021-03819-2","DOI":"10.1038\/s41586-021-03819-2"},{"key":"e_1_3_2_1_23_1","doi-asserted-by":"publisher","DOI":"10.1145\/3297858.3304028"},{"key":"e_1_3_2_1_24_1","doi-asserted-by":"publisher","DOI":"10.1109\/ASAP.2019.00-31"},{"key":"e_1_3_2_1_25_1","doi-asserted-by":"publisher","DOI":"10.1145\/3352460.3358252"},{"key":"e_1_3_2_1_26_1","doi-asserted-by":"publisher","DOI":"10.1145\/3296957.3173176"},{"key":"e_1_3_2_1_27_1","volume-title":"ALBERT: A Lite BERT for Self-supervised Learning of Language Representations. In International Conference on Learning Representations. https:\/\/openreview.net\/forum?id=H1eA7AEtvS","author":"Lan Zhenzhong","year":"2020","unstructured":"Zhenzhong Lan, Mingda Chen, Sebastian Goodman, Kevin Gimpel, Piyush Sharma, and Radu Soricut. 2020. ALBERT: A Lite BERT for Self-supervised Learning of Language Representations. In International Conference on Learning Representations. https:\/\/openreview.net\/forum?id=H1eA7AEtvS"},{"key":"e_1_3_2_1_28_1","volume-title":"Gonzalez","author":"Li Zhuohan","year":"2020","unstructured":"Zhuohan Li, Eric Wallace, Sheng Shen, Kevin Lin, Kurt Keutzer, Dan Klein, and Joseph E. Gonzalez. 2020. Train large, then compress: rethinking model size for efficient training and inference of transformers. arXiv, February, arxiv:2002.11794"},{"key":"e_1_3_2_1_29_1","doi-asserted-by":"publisher","DOI":"10.1109\/MICRO50266.2020.00066"},{"key":"e_1_3_2_1_30_1","volume-title":"Mike Lewis, Luke Zettlemoyer, and Veselin Stoyanov","author":"Liu Yinhan","year":"2019","unstructured":"Yinhan Liu, Myle Ott, Naman Goyal, Jingfei Du, Mandar Joshi, Danqi Chen, Omer Levy, Mike Lewis, Luke Zettlemoyer, and Veselin Stoyanov. 2019. RoBERTa: A Robustly Optimized BERT Pretraining Approach. ArXiv e-prints, July, arxiv:1907.11692."},{"key":"e_1_3_2_1_31_1","unstructured":"Siyuan Lu Meiqi Wang Shuang Liang Jun Lin and Zhongfeng Wang. 2020. 
Hardware Accelerator for Multi-Head Attention and Position-Wise Feed-Forward in the Transformer. ArXiv e-prints September arxiv:2009.08605."},{"key":"e_1_3_2_1_32_1","doi-asserted-by":"publisher","DOI":"10.1145\/3318464.3389705"},{"key":"e_1_3_2_1_33_1","doi-asserted-by":"publisher","DOI":"10.1145\/2717764.2717783"},{"key":"e_1_3_2_1_34_1","doi-asserted-by":"publisher","DOI":"10.1145\/3330345.3330385"},{"key":"e_1_3_2_1_35_1","doi-asserted-by":"publisher","unstructured":"Joshua Meier Roshan Rao Robert Verkuil Jason Liu Tom Sercu and Alexander Rives. 2021. Language models enable zero-shot prediction of the effects of mutations on protein function. bioRxiv https:\/\/doi.org\/10.1101\/2021.07.09.450648 10.1101\/2021.07.09.450648","DOI":"10.1101\/2021.07.09.450648"},{"key":"e_1_3_2_1_36_1","unstructured":"NC State University. 2022. FreePDK15. https:\/\/www.eda.ncsu.edu\/freepdk15"},{"key":"e_1_3_2_1_37_1","volume-title":"Hot Chips: A Symposium on High Performance Chips.","author":"NVIDIA.","year":"2018","unstructured":"NVIDIA. 2018. The NVIDIA deep learning accelerator. In Hot Chips: A Symposium on High Performance Chips."},{"key":"e_1_3_2_1_38_1","unstructured":"NVIDIA. 2022. NVIDIA Grace CPU. https:\/\/www.nvidia.com\/en-us\/data-center\/grace-cpu\/"},{"key":"e_1_3_2_1_39_1","unstructured":"NVIDIA. 2022. NVLink and NVSwitch. https:\/\/www.nvidia.com\/en-us\/data-center\/nvlink\/"},{"key":"e_1_3_2_1_40_1","doi-asserted-by":"publisher","DOI":"10.1145\/3079856.3080254"},{"key":"e_1_3_2_1_41_1","unstructured":"PyTorch. 2022. PyTorch: An open source machine learning framework that accelerates the path from research prototyping to production deployment.. http:\/\/pytorch.org"},{"key":"e_1_3_2_1_42_1","doi-asserted-by":"publisher","DOI":"10.1109\/HPCA47549.2020.00015"},{"key":"e_1_3_2_1_43_1","doi-asserted-by":"publisher","DOI":"10.5555\/3454287.3455156"},{"key":"e_1_3_2_1_44_1","unstructured":"Berkeley Architecture Research. 2022. Chisel Hardware Construction Language. 
https:\/\/chisel.eecs.berkeley.edu\/"},{"key":"e_1_3_2_1_45_1","doi-asserted-by":"publisher","DOI":"10.1073\/pnas.2016239118"},{"key":"e_1_3_2_1_46_1","doi-asserted-by":"publisher","DOI":"10.1002\/pro.2829"},{"key":"e_1_3_2_1_47_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.vlsi.2017.02.002"},{"key":"e_1_3_2_1_48_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1"},{"key":"e_1_3_2_1_49_1","unstructured":"Synopsys. 2022. Synopsys Silicon Design and Verification. https:\/\/www.synopsys.com"},{"key":"e_1_3_2_1_50_1","volume-title":"International Conference on Learning Representations. https:\/\/openreview.net\/forum?id=qVyeW-grC2k","author":"Tay Yi","year":"2021","unstructured":"Yi Tay, Mostafa Dehghani, Samira Abnar, Yikang Shen, Dara Bahri, Philip Pham, Jinfeng Rao, Liu Yang, Sebastian Ruder, and Donald Metzler. 2021. Long Range Arena : A Benchmark for Efficient Transformers. In International Conference on Learning Representations. https:\/\/openreview.net\/forum?id=qVyeW-grC2k"},{"key":"e_1_3_2_1_51_1","volume-title":"Transformer: A Novel Neural Network Architecture for Language Understanding. https:\/\/ai.googleblog.com\/2017\/08\/transformer-novel-neural-network.html","author":"Uszkoreit Jakob","year":"2017","unstructured":"Jakob Uszkoreit. 2017. Transformer: A Novel Neural Network Architecture for Language Understanding. https:\/\/ai.googleblog.com\/2017\/08\/transformer-novel-neural-network.html"},{"volume-title":"Proceedings of the IEEE International Symposium on High-Performance Computer Architecture (HPCA).","author":"Wang H.","key":"e_1_3_2_1_52_1","unstructured":"H. Wang, Z. Zhekai, and S. Han. 2021. SpAtten: Efficient Sparse Attention Architecture with Cascade Token and Head Pruning. 
In Proceedings of the IEEE International Symposium on High-Performance Computer Architecture (HPCA)."},{"key":"e_1_3_2_1_53_1","doi-asserted-by":"publisher","DOI":"10.1038\/s41592-019-0496-6"}],"event":{"name":"ASPLOS '22: 27th ACM International Conference on Architectural Support for Programming Languages and Operating Systems","sponsor":["SIGPLAN ACM Special Interest Group on Programming Languages","SIGOPS ACM Special Interest Group on Operating Systems","SIGARCH ACM Special Interest Group on Computer Architecture","SIGBED ACM Special Interest Group on Embedded Systems"],"location":"Lausanne Switzerland","acronym":"ASPLOS '22"},"container-title":["Proceedings of the 27th ACM International Conference on Architectural Support for Programming Languages and Operating Systems"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3503222.3507722","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3503222.3507722","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,17]],"date-time":"2025-06-17T20:11:39Z","timestamp":1750191099000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3503222.3507722"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2022,2,22]]},"references-count":53,"alternative-id":["10.1145\/3503222.3507722","10.1145\/3503222"],"URL":"https:\/\/doi.org\/10.1145\/3503222.3507722","relation":{},"subject":[],"published":{"date-parts":[[2022,2,22]]},"assertion":[{"value":"2022-02-22","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}