{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,1,2]],"date-time":"2026-01-02T07:24:13Z","timestamp":1767338653591,"version":"3.41.0"},"publisher-location":"New York, NY, USA","reference-count":33,"publisher":"ACM","license":[{"start":{"date-parts":[[2024,2,17]],"date-time":"2024-02-17T00:00:00Z","timestamp":1708128000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2024,2,17]]},"DOI":"10.1145\/3640537.3641572","type":"proceedings-article","created":{"date-parts":[[2024,2,20]],"date-time":"2024-02-20T21:43:05Z","timestamp":1708465385000},"page":"227-237","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":1,"title":["Fast and Accurate Context-Aware Basic Block Timing Prediction using Transformers"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0000-0001-7668-2560","authenticated-orcid":false,"given":"Abderaouf Nassim","family":"Amalou","sequence":"first","affiliation":[{"name":"University of Rennes - Inria - CNRS - IRISA, Rennes, France"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0133-3491","authenticated-orcid":false,"given":"Elisa","family":"Fromont","sequence":"additional","affiliation":[{"name":"University of Rennes - IUF - Inria - CNRS - IRISA, Rennes, France"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9310-9651","authenticated-orcid":false,"given":"Isabelle","family":"Puaut","sequence":"additional","affiliation":[{"name":"University of Rennes - Inria - CNRS - IRISA, Rennes, France"}]}],"member":"320","published-online":{"date-parts":[[2024,2,20]]},"reference":[{"key":"e_1_3_2_1_1_1","unstructured":"Accessed 2023. ARM Cortex-A53 Processor. https:\/\/developer.arm.com\/ip-products\/processors\/cortex-a\/cortex-a53"},{"key":"e_1_3_2_1_2_1","unstructured":"Accessed 2023. ARM Cortex-A72 Processor. https:\/\/developer.arm.com\/ip-products\/processors\/cortex-a\/cortex-a72"},{"key":"e_1_3_2_1_3_1","unstructured":"Accessed 2023. ARM Cortex-M4 Processor. https:\/\/developer.arm.com\/ip-products\/processors\/cortex-m\/cortex-m4"},{"key":"e_1_3_2_1_4_1","unstructured":"Accessed 2023. ARM Cortex-M7 Processor. https:\/\/developer.arm.com\/ip-products\/processors\/cortex-m\/cortex-m7"},{"key":"e_1_3_2_1_5_1","doi-asserted-by":"publisher","DOI":"10.1109\/ICTAI56018.2022.00090"},{"key":"e_1_3_2_1_6_1","doi-asserted-by":"publisher","unstructured":"Abderaouf Nassim AMALOU Isabelle Puaut and Elisa Fromont. 2023. Pre-training and fine-tuning dataset for transformers consisting of basic blocks and their execution times (average minimum and maximum) along with the execution context of these blocks for various Cortex processors M7 M4 A53 and A72.. https:\/\/doi.org\/10.5281\/zenodo.10043908 10.5281\/zenodo.10043908","DOI":"10.5281\/zenodo.10043908"},{"key":"e_1_3_2_1_7_1","doi-asserted-by":"publisher","DOI":"10.1109\/RTCSA52859.2021.00011"},{"key":"e_1_3_2_1_8_1","doi-asserted-by":"publisher","DOI":"10.1006\/csla.1999.0128"},{"key":"e_1_3_2_1_9_1","doi-asserted-by":"publisher","DOI":"10.1109\/IISWC47752.2019.9042166"},{"key":"e_1_3_2_1_10_1","doi-asserted-by":"publisher","DOI":"10.1145\/2355585.2355594"},{"key":"e_1_3_2_1_11_1","volume-title":"Transformer-xl: Attentive language models beyond a fixed-length context. arXiv preprint arXiv:1901.02860.","author":"Dai Zihang","year":"2019","unstructured":"Zihang Dai, Zhilin Yang, Yiming Yang, Jaime Carbonell, Quoc V Le, and Ruslan Salakhutdinov. 2019. Transformer-xl: Attentive language models beyond a fixed-length context. arXiv preprint arXiv:1901.02860."},{"key":"e_1_3_2_1_12_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1"},{"key":"e_1_3_2_1_13_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1"},{"key":"e_1_3_2_1_14_1","unstructured":"SEGGER Microcontroller GmbH. [n. d.]. Ozone User Guide & Reference Manual. 348 pages. https:\/\/www.segger.com\/"},{"key":"e_1_3_2_1_15_1","volume-title":"GraphCode BERT: Pre-training Code Representations with Data Flow. In International Conference on Learning Representations. https:\/\/openreview.net\/forum?id=jLoC4ez43PZ","author":"Guo Daya","year":"2021","unstructured":"Daya Guo, Shuo Ren, Shuai Lu, Zhangyin Feng, Duyu Tang, Shujie LIU, Long Zhou, Nan Duan, Alexey Svyatkovskiy, Shengyu Fu, Michele Tufano, Shao Kun Deng, Colin Clement, Dawn Drain, Neel Sundaresan, Jian Yin, Daxin Jiang, and Ming Zhou. 2021. GraphCode BERT: Pre-training Code Representations with Data Flow. In International Conference on Learning Representations. https:\/\/openreview.net\/forum?id=jLoC4ez43PZ"},{"key":"e_1_3_2_1_16_1","doi-asserted-by":"publisher","DOI":"10.1109\/WWC.2001.990739"},{"key":"e_1_3_2_1_17_1","doi-asserted-by":"crossref","unstructured":"Urvashi Khandelwal He He Peng Qi and Dan Jurafsky. 2018. Sharp nearby fuzzy far away: How neural language models use context. arXiv preprint arXiv:1805.04623.","DOI":"10.18653\/v1\/P18-1027"},{"key":"e_1_3_2_1_18_1","doi-asserted-by":"publisher","DOI":"10.1007\/3-540-45874-3_10"},{"key":"e_1_3_2_1_19_1","volume-title":"Sentencepiece: A simple and language independent subword tokenizer and detokenizer for neural text processing. arXiv preprint arXiv:1808.06226.","author":"Kudo Taku","year":"2018","unstructured":"Taku Kudo and John Richardson. 2018. Sentencepiece: A simple and language independent subword tokenizer and detokenizer for neural text processing. arXiv preprint arXiv:1808.06226."},{"key":"e_1_3_2_1_20_1","doi-asserted-by":"publisher","DOI":"10.1145\/3460120.3484587"},{"key":"e_1_3_2_1_21_1","doi-asserted-by":"publisher","DOI":"10.1109\/MM.2020.3012883"},{"key":"e_1_3_2_1_22_1","volume-title":"International Conference on machine learning. 4505\u20134515","author":"Mendis Charith","year":"2019","unstructured":"Charith Mendis, Alex Renda, Saman Amarasinghe, and Michael Carbin. 2019. Ithemal: Accurate, portable and fast basic block throughput estimation using deep neural networks. In International Conference on machine learning. 4505\u20134515."},{"key":"e_1_3_2_1_23_1","volume-title":"Int. Conference on machine learning. PMLR.","author":"Mendis Charith","year":"2019","unstructured":"Charith Mendis, Alex Renda, Saman Amarasinghe, and Michael Carbin. 2019. Ithemal: Accurate, portable and fast basic block throughput estimation using deep neural networks. In Int. Conference on machine learning. PMLR."},{"key":"e_1_3_2_1_24_1","volume-title":"Distributed representations of words and phrases and their compositionality. Advances in neural information processing systems, 26","author":"Mikolov Tomas","year":"2013","unstructured":"Tomas Mikolov, Ilya Sutskever, Kai Chen, Greg S Corrado, and Jeff Dean. 2013. Distributed representations of words and phrases and their compositionality. Advances in neural information processing systems, 26 (2013)."},{"key":"e_1_3_2_1_25_1","unstructured":"Ruchir Puri David S Kung Geert Janssen Wei Zhang Giacomo Domeniconi Vladimir Zolotov Julian Dolby Jie Chen Mihir Choudhury and Lindsey Decker. 2021. CodeNet: A large-scale AI for code dataset for learning a diversity of coding tasks. arXiv preprint arXiv:2105.12655."},{"key":"e_1_3_2_1_26_1","unstructured":"Segger. [n. d.]. J-Trace PRO \u2013 The Leading Trace Solution. https:\/\/www.segger.com\/products\/debug-probes\/j-trace\/"},{"key":"e_1_3_2_1_27_1","doi-asserted-by":"publisher","DOI":"10.23919\/DATE54114.2022.9774589"},{"key":"e_1_3_2_1_28_1","doi-asserted-by":"publisher","DOI":"10.1109\/SP.2016.17"},{"key":"e_1_3_2_1_29_1","doi-asserted-by":"publisher","DOI":"10.1109\/IISWC55918.2022.00012"},{"key":"e_1_3_2_1_30_1","volume-title":"\u0141 ukasz Kaiser, and Illia Polosukhin","author":"Vaswani Ashish","year":"2017","unstructured":"Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan N Gomez, \u0141 ukasz Kaiser, and Illia Polosukhin. 2017. Attention is all you need. Advances in neural information processing systems, 30 (2017)."},{"key":"e_1_3_2_1_31_1","volume-title":"\u0141 ukasz Kaiser, and Illia Polosukhin","author":"Vaswani Ashish","year":"2017","unstructured":"Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan N Gomez, \u0141 ukasz Kaiser, and Illia Polosukhin. 2017. Attention is all you need. In Advances in neural information processing systems. 5998\u20136008."},{"key":"e_1_3_2_1_32_1","doi-asserted-by":"publisher","DOI":"10.1109\/JPROC.2018.2817118"},{"key":"e_1_3_2_1_33_1","unstructured":"Tomofumi Yuki and Louis-No\u00ebl Pouchet. 2016. PolyBench 4.2. 1 (pre-release)."}],"event":{"name":"CC '24: 33rd ACM SIGPLAN International Conference on Compiler Construction","sponsor":["SIGPLAN ACM Special Interest Group on Programming Languages"],"location":"Edinburgh United Kingdom","acronym":"CC '24"},"container-title":["Proceedings of the 33rd ACM SIGPLAN International Conference on Compiler Construction"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3640537.3641572","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3640537.3641572","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,18]],"date-time":"2025-06-18T22:50:24Z","timestamp":1750287024000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3640537.3641572"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,2,17]]},"references-count":33,"alternative-id":["10.1145\/3640537.3641572","10.1145\/3640537"],"URL":"https:\/\/doi.org\/10.1145\/3640537.3641572","relation":{},"subject":[],"published":{"date-parts":[[2024,2,17]]},"assertion":[{"value":"2024-02-20","order":2,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}