{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,10,1]],"date-time":"2025-10-01T15:27:15Z","timestamp":1759332435695,"version":"3.37.3"},"reference-count":28,"publisher":"IEEE","license":[{"start":{"date-parts":[[2022,7,18]],"date-time":"2022-07-18T00:00:00Z","timestamp":1658102400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2022,7,18]],"date-time":"2022-07-18T00:00:00Z","timestamp":1658102400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100007434","name":"Ag\u00eancia Nacional de Inova\u00e7\u00e3o","doi-asserted-by":"publisher","id":[{"id":"10.13039\/501100007434","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2022,7,18]]},"DOI":"10.1109\/fuzz-ieee55066.2022.9882595","type":"proceedings-article","created":{"date-parts":[[2022,9,14]],"date-time":"2022-09-14T19:39:59Z","timestamp":1663184399000},"page":"1-8","source":"Crossref","is-referenced-by-count":1,"title":["A Study on the Best Way to Compress Natural Language Processing Models"],"prefix":"10.1109","author":[{"given":"Joao","family":"Antunes","sequence":"first","affiliation":[{"name":"Universidade de Lisboa,INESC-ID, Instituto Superior T&#x00E9;cnico,Lisbon,Portugal"}]},{"given":"Miguel L.","family":"Pardal","sequence":"additional","affiliation":[{"name":"Universidade de Lisboa,INESC-ID, Instituto Superior T&#x00E9;cnico,Lisbon,Portugal"}]},{"given":"Luisa","family":"Coheur","sequence":"additional","affiliation":[{"name":"Universidade de Lisboa,INESC-ID, Instituto Superior T&#x00E9;cnico,Lisbon,Portugal"}]}],"member":"263","reference":[{"article-title":"To prune, or not to prune: exploring the efficacy of pruning for model compression","year":"2017","author":"zhu","key":"ref10"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00890"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.repl4nlp-1.18"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00286"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1109\/EMC2-NIPS53020.2019.00016"},{"article-title":"DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter","year":"2020","author":"sanh","key":"ref15"},{"article-title":"The Lottery Ticket Hypothesis: Finding Sparse, Trainable Neural Networks","year":"2019","author":"frankle","key":"ref16"},{"article-title":"The Lottery Ticket Hypothesis for Pre-trained BERT Networks","year":"2020","author":"chen","key":"ref17"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.emnlp-main.259"},{"year":"0","key":"ref19","article-title":"Greenhouse gas emission intensity of electricity generation &#x2014; European Environment Agency"},{"article-title":"Sparsity in Deep Learning: Pruning and growth for efficient inference and training in neural networks","year":"2021","author":"hoefler","key":"ref28"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1355"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D19-1441"},{"article-title":"The Cost of Training NLP Models: A Concise Overview","year":"2020","author":"sharir","key":"ref3"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D19-1224"},{"article-title":"A Survey of Model Compression and 
Acceleration for Deep Neural Networks","year":"2020","author":"cheng","key":"ref5"},{"article-title":"Distilling the Knowledge in a Neural Network","year":"2015","author":"hinton","key":"ref8"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1145\/1150402.1150464"},{"article-title":"Language Models are Few-Shot Learners","year":"2020","author":"brown","key":"ref2"},{"article-title":"Well-Read Students Learn Better: On the Importance of Pre-training Compact Models","year":"2019","author":"turc","key":"ref9"},{"key":"ref1","first-page":"4171","article-title":"BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding","author":"devlin","year":"2019","journal-title":"Proceedings of the 2019 Conference of the North American Chapter of the Association for Computational Linguistics Human Language Technologies Volume 1 (Long and Short Papers)"},{"year":"2020","key":"ref20","article-title":"Emissions & Generation Resource Integrated Database (eGRID)"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.3115\/1119176.1119195"},{"key":"ref21","first-page":"142","article-title":"Learning word vectors for sentiment analysis","author":"maas","year":"2011","journal-title":"Proceedings of the 49th Annual Meeting of the Association for Computational Linguistics Human Language Technologies"},{"article-title":"Pointer Sentinel Mixture Models","year":"2016","author":"merity","key":"ref24"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P18-1205"},{"article-title":"Language Models are Unsupervised Multitask Learners","year":"2019","author":"radford","key":"ref26"},{"key":"ref25","first-page":"8024","article-title":"Pytorch: An imperative style, high-performance deep learning library","author":"paszke","year":"2019","journal-title":"Advances in Neural IInformation Processing Systems"}],"event":{"name":"2022 IEEE International Conference on Fuzzy Systems (FUZZ-IEEE)","start":{"date-parts":[[2022,7,18]]},"location":"Padua, Italy","end":{"date-parts":[[2022,7,23]]}},"container-title":["2022 IEEE International Conference on Fuzzy Systems (FUZZ-IEEE)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/9882530\/9882539\/09882595.pdf?arnumber=9882595","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,10,3]],"date-time":"2022-10-03T20:34:41Z","timestamp":1664829281000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9882595\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2022,7,18]]},"references-count":28,"URL":"https:\/\/doi.org\/10.1109\/fuzz-ieee55066.2022.9882595","relation":{},"subject":[],"published":{"date-parts":[[2022,7,18]]}}}
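The record above is a standard Crossref REST API "work" payload: the message envelope (status, message-type) wraps the bibliographic metadata and the 28-entry reference list for this FUZZ-IEEE 2022 paper. A minimal sketch of fetching and unpacking the same record follows; the DOI, endpoint, and field names come straight from the record, while the third-party requests dependency and the 30-second timeout are assumptions of this sketch.

# Sketch: retrieve this Crossref work record and print a few of its fields.
# Assumes the `requests` package; endpoint is the public Crossref REST API.
import requests

DOI = "10.1109/fuzz-ieee55066.2022.9882595"  # DOI field of the record above

resp = requests.get(f"https://api.crossref.org/works/{DOI}", timeout=30)
resp.raise_for_status()

# The JSON body mirrors the record above; the metadata sits under "message".
work = resp.json()["message"]

title = work["title"][0]
authors = ", ".join(f"{a.get('given', '')} {a['family']}".strip() for a in work.get("author", []))

print(title)                                              # paper title
print(authors)                                            # Antunes, Pardal, Coheur
print(work["event"]["name"], "-", work["event"]["location"])
print(f"references: {work['references-count']}, cited by: {work['is-referenced-by-count']}")

Note that some reference entries carry only a DOI (e.g. ref11) while others carry only an article title, author, and year (e.g. ref10), so any code walking work["reference"] should treat every field as optional.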