{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,9,17]],"date-time":"2025-09-17T06:12:07Z","timestamp":1758089527365,"version":"3.44.0"},"reference-count":30,"publisher":"IEEE","license":[{"start":{"date-parts":[[2025,6,22]],"date-time":"2025-06-22T00:00:00Z","timestamp":1750550400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,6,22]],"date-time":"2025-06-22T00:00:00Z","timestamp":1750550400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100012166","name":"National Key Research and Development Program of China","doi-asserted-by":"publisher","id":[{"id":"10.13039\/501100012166","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025,6,22]]},"DOI":"10.1109\/dac63849.2025.11132578","type":"proceedings-article","created":{"date-parts":[[2025,9,15]],"date-time":"2025-09-15T17:35:41Z","timestamp":1757957741000},"page":"1-7","source":"Crossref","is-referenced-by-count":0,"title":["OutlierCIM: Outlier-Aware Digital CIM-Based LLM Accelerator with Hybrid-Strategy Quantization and Unified FP-INT Computation"],"prefix":"10.1109","author":[{"given":"Zihan","family":"Zou","sequence":"first","affiliation":[{"name":"Southeast University,School of Integrated Circuits,Nanjing,China"}]},{"given":"Shikuang","family":"Chen","sequence":"additional","affiliation":[{"name":"Southeast University,School of Integrated Circuits,Nanjing,China"}]},{"given":"Chen","family":"Zhang","sequence":"additional","affiliation":[{"name":"Southeast University,School of Integrated Circuits,Nanjing,China"}]},{"given":"Xing","family":"Wang","sequence":"additional","affiliation":[{"name":"Southeast University,School of Integrated 
Circuits,Nanjing,China"}]},{"given":"Zhichao","family":"Liu","sequence":"additional","affiliation":[{"name":"Southeast University,School of Integrated Circuits,Nanjing,China"}]},{"given":"Haoran","family":"Du","sequence":"additional","affiliation":[{"name":"Southeast University,School of Integrated Circuits,Nanjing,China"}]},{"given":"Xin","family":"Si","sequence":"additional","affiliation":[{"name":"Southeast University,School of Integrated Circuits,Nanjing,China"}]},{"given":"Hao","family":"Cai","sequence":"additional","affiliation":[{"name":"Southeast University,School of Integrated Circuits,Nanjing,China"}]},{"given":"Bo","family":"Liu","sequence":"additional","affiliation":[{"name":"Southeast University,School of Integrated Circuits,Nanjing,China"}]}],"member":"263","reference":[{"key":"ref1","first-page":"4171","article-title":"BERT: Pretraining of deep bidirectional transformers for language understanding","volume-title":"Proceedings of the NAACL-HLT","author":"Devlin"},{"issue":"8","key":"ref2","first-page":"9","article-title":"Language models are unsupervised multitask learners","volume":"1","author":"Radford","journal-title":"OpenAI blog"},{"key":"ref3","article-title":"Neural machine translation by jointly learning to align and translate","author":"Bahdanau","year":"2014","journal-title":"arXiv:1409.0473"},{"key":"ref4","article-title":"Abstractive text summarization using sequence-to-sequence rnns and beyond","author":"Nallapati","year":"2016","journal-title":"arXiv:1602.06023"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D13-1160"},{"key":"ref6","article-title":"OmniQuant: Omnidirectionally calibrated quantization for large language models","author":"Shao","year":"2023","journal-title":"arXiv:2308.13137"},{"key":"ref7","first-page":"38087","article-title":"SmoothQuant: Accurate and efficient post-training quantization for large language models","volume-title":"Proceedings of the 40th International Conference on Machine 
Learning","volume":"202","author":"Xiao"},{"key":"ref8","article-title":"AffineQuant: Affine transformation quantization for large language models","author":"Ma","year":"2024","journal-title":"arXiv:2403.12544"},{"key":"ref9","article-title":"DuQuant: Distributing outliers via dual transformation makes stronger quantized LLMs","author":"Lin","year":"2024","journal-title":"arXiv:2406.01721"},{"key":"ref10","article-title":"LLM.int8(): 8-bit matrix multiplication for transformers at scale","author":"Dettmers","year":"2022","journal-title":"arXiv:2208.07339"},{"key":"ref11","article-title":"A Survey on efficient inference for large language models","author":"Zhou","year":"2024","journal-title":"arXiv:2404.14294"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1109\/MICRO50266.2020.00071"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/ISCA.2018.00063"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1145\/3579371.3589038"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1145\/3649329.3656221"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/ISCA59077.2024.00080"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.5948\/upo9781614442073.012"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/DAC56929.2023.10247976"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1145\/3649329.3655690"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1109\/JSSC.2022.3222059"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/HPCA57654.2024.00064"},{"article-title":"I-LLM: Efficient Integer-Only inference for Fully-Quantized Low-Bit large language models","year":"2024","volume-title":"arXiv:2405.17849","author":"Hu","key":"ref22"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1109\/isscc42615.2023.10067260"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV51070.2023.01565"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1109\/IEEESTD.2008.4610935"},{"key":"ref26","article-title":"LLaMA: 
Open and efficient foundation language models","author":"Touvron","year":"2023","journal-title":"arXiv:2302.13971"},{"key":"ref27","article-title":"OPT: Open Pre-trained transformer language models","author":"Zhang","year":"2022","journal-title":"arXiv:2205.01068"},{"key":"ref28","first-page":"1","article-title":"Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer","volume":"21","author":"Raffel","year":"2020","journal-title":"Journal of machine learning research"},{"key":"ref29","article-title":"Pointer sentinel mixture models","author":"Merity","year":"2016","journal-title":"arXiv:1609.07843"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1145\/3503222.3507738"}],"event":{"name":"2025 62nd ACM\/IEEE Design Automation Conference (DAC)","start":{"date-parts":[[2025,6,22]]},"location":"San Francisco, CA, USA","end":{"date-parts":[[2025,6,25]]}},"container-title":["2025 62nd ACM\/IEEE Design Automation Conference (DAC)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/11132383\/11132091\/11132578.pdf?arnumber=11132578","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,9,16]],"date-time":"2025-09-16T05:31:13Z","timestamp":1758000673000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11132578\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,6,22]]},"references-count":30,"URL":"https:\/\/doi.org\/10.1109\/dac63849.2025.11132578","relation":{},"subject":[],"published":{"date-parts":[[2025,6,22]]}}}