{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,27]],"date-time":"2026-03-27T06:53:18Z","timestamp":1774594398210,"version":"3.50.1"},"reference-count":23,"publisher":"IEEE","license":[{"start":{"date-parts":[[2025,9,1]],"date-time":"2025-09-01T00:00:00Z","timestamp":1756684800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,9,1]],"date-time":"2025-09-01T00:00:00Z","timestamp":1756684800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/100000015","name":"DOE","doi-asserted-by":"publisher","award":["DESC0024458"],"award-info":[{"award-number":["DESC0024458"]}],"id":[{"id":"10.13039\/100000015","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025,9,1]]},"DOI":"10.1109\/fpl68686.2025.00014","type":"proceedings-article","created":{"date-parts":[[2026,3,26]],"date-time":"2026-03-26T19:48:24Z","timestamp":1774554504000},"page":"10-17","source":"Crossref","is-referenced-by-count":0,"title":["ReconFormer: A Multi-Level Run-Time Reconfigurable System-on-Chip for Accelerating Transformers"],"prefix":"10.1109","author":[{"given":"Je","family":"Yang","sequence":"first","affiliation":[{"name":"Columbia University in the City of New York,Department of Computer Science,New York,NY,10027"}]},{"given":"Gabriele","family":"Tombesi","sequence":"additional","affiliation":[{"name":"Columbia University in the City of New York,Department of Computer Science,New York,NY,10027"}]},{"given":"Joseph","family":"Zuckerman","sequence":"additional","affiliation":[{"name":"Columbia University in the City of New York,Department of Computer Science,New York,NY,10027"}]},{"given":"Luca P.","family":"Carloni","sequence":"additional","affiliation":[{"name":"Columbia University in the City of New York,Department of Computer Science,New York,NY,10027"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.48550\/ARXIV.1706.03762"},{"key":"ref2","article-title":"Bert: Pre-training of deep bidirectional transformers for language understanding","author":"Devlin","year":"2018","journal-title":"arXiv preprint"},{"issue":"8","key":"ref3","first-page":"9","article-title":"Language models are unsupervised multitask learners","volume":"1","author":"Radford","year":"2019","journal-title":"OpenAI blog"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/HPCA51647.2021.00018"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.23919\/DATE54114.2022.9774692"},{"key":"ref6","first-page":"606","article-title":"Efficiently scaling transformer inference","volume-title":"Proceedings of Machine Learning and Systems","volume":"5","author":"Pope","year":"2023"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/MICRO56248.2022.00051"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.23919\/DATE58400.2024.10546617"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.52202\/068431-1189"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1145\/3575693.3575747"},{"key":"ref11","article-title":"Full stack optimization of transformer inference: a survey","author":"Kim","year":"2023","journal-title":"arXiv preprint"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1145\/3466752.3480065"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1145\/2744769.2744794"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1109\/ASPDAC.2016.7428012"},{"key":"ref15","first-page":"1","article-title":"Agile SoC development with open esp","volume-title":"Proceedings of the International Conference On Computer Aided Design (ICCAD)","author":"Mantovani","year":"2020"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1145\/2897937.2905018"},{"key":"ref17","year":"2024","journal-title":"NVIDIA Data Center Deep Learning Product Performance AI Inference"},{"key":"ref18","first-page":"38087","article-title":"Smoothquant: Accurate and efficient post-training quantization for large language models","volume-title":"International Conference on Machine Learning","author":"Xiao","year":"2023"},{"key":"ref19","first-page":"87","article-title":"AWQ: Activation-aware weight quantization for on-device LLM compression and acceleration","volume-title":"Proceedings of Machine Learning and Systems","volume":"6","author":"Lin","year":"2024"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1145\/3649329.3655953"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/DAC18074.2021.9586134"},{"key":"ref22","first-page":"571","article-title":"Salo: an efficient spatial accelerator enabling hybrid sparse attention mechanisms for long sequences","volume-title":"Proceedings of the Design Automation Conference","author":"Shen","year":"2022"},{"key":"ref23","first-page":"577","article-title":"NNLUT: neural approximation of non-linear operations for efficient transformer inference","volume-title":"Proceedings of the Design Automation Conference (DAC)","author":"Yu","year":"2022"}],"event":{"name":"2025 35th International Conference on Field-Programmable Logic and Applications (FPL)","location":"Leiden, Netherlands","start":{"date-parts":[[2025,9,1]]},"end":{"date-parts":[[2025,9,5]]}},"container-title":["2025 35th International Conference on Field-Programmable Logic and Applications (FPL)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/11449056\/11449057\/11449114.pdf?arnumber=11449114","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,3,27]],"date-time":"2026-03-27T05:23:18Z","timestamp":1774588998000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11449114\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,9,1]]},"references-count":23,"URL":"https:\/\/doi.org\/10.1109\/fpl68686.2025.00014","relation":{},"subject":[],"published":{"date-parts":[[2025,9,1]]}}}