{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,6,4]],"date-time":"2025-06-04T05:46:30Z","timestamp":1749015990394,"version":"3.28.0"},"reference-count":25,"publisher":"IEEE","license":[{"start":{"date-parts":[[2022,5,23]],"date-time":"2022-05-23T00:00:00Z","timestamp":1653264000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2022,5,23]],"date-time":"2022-05-23T00:00:00Z","timestamp":1653264000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2022,5,23]]},"DOI":"10.1109\/icassp43922.2022.9746113","type":"proceedings-article","created":{"date-parts":[[2022,4,27]],"date-time":"2022-04-27T19:50:34Z","timestamp":1651089034000},"page":"7992-7996","source":"Crossref","is-referenced-by-count":2,"title":["Compressing Transformer-Based ASR Model by Task-Driven Loss and Attention-Based Multi-Level Feature Distillation"],"prefix":"10.1109","author":[{"given":"Yongjie","family":"Lv","sequence":"first","affiliation":[{"name":"Tianjin University,Tianjin Key Laboratory of Cognitive Computing and Application, College of Intelligence and Computing,Tianjin,China"}]},{"given":"Longbiao","family":"Wang","sequence":"additional","affiliation":[{"name":"Tianjin University,Tianjin Key Laboratory of Cognitive Computing and Application, College of Intelligence and Computing,Tianjin,China"}]},{"given":"Meng","family":"Ge","sequence":"additional","affiliation":[{"name":"Tianjin University,Tianjin Key Laboratory of Cognitive Computing and Application, College of Intelligence and Computing,Tianjin,China"}]},{"given":"Sheng","family":"Li","sequence":"additional","affiliation":[{"name":"National Institute of Information and Communications Technology (NICT),Kyoto,Japan"}]},{"given":"Chenchen","family":"Ding","sequence":"additional","affiliation":[{"name":"National Institute of Information and Communications Technology (NICT),Kyoto,Japan"}]},{"given":"Lixin","family":"Pan","sequence":"additional","affiliation":[{"name":"Huiyan Technology (TianJin) Co., Ltd.,Tianjin,China"}]},{"given":"Yuguang","family":"Wang","sequence":"additional","affiliation":[{"name":"Huiyan Technology (TianJin) Co., Ltd.,Tianjin,China"}]},{"given":"Jianwu","family":"Dang","sequence":"additional","affiliation":[{"name":"Tianjin University,Tianjin Key Laboratory of Cognitive Computing and Application, College of Intelligence and Computing,Tianjin,China"}]},{"given":"Kiyoshi","family":"Honda","sequence":"additional","affiliation":[{"name":"Tianjin University,Tianjin Key Laboratory of Cognitive Computing and Application, College of Intelligence and Computing,Tianjin,China"}]}],"member":"263","reference":[{"article-title":"Efficient 8-bit quantization of transformer neural machine language translation model","year":"2019","author":"bhandare","key":"ref10"},{"article-title":"Fully quantized transformer for machine translation","year":"2019","author":"prato","key":"ref11"},{"article-title":"What is the state of neural network pruning?","year":"2020","author":"blalock","key":"ref12"},{"article-title":"Distilling the knowledge in a neural network","year":"2015","author":"hinton","key":"ref13"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP.2019.8683171"},{"article-title":"Improved knowledge distillation via teacher assistant","year":"2019","author":"mirzadeh","key":"ref15"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D16-1139"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D19-1441"},{"article-title":"Tutornet: Towards flexible knowledge distillation for end-to-end speech recognition","year":"2020","author":"yoon","key":"ref18"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.findings-emnlp.372"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.21437\/Interspeech.2018-1423"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.21437\/Interspeech.2020-3015"},{"key":"ref6","first-page":"253","article-title":"On the effectiveness of low-rank matrix factorization for LSTM model compression","author":"winata","year":"2019","journal-title":"Proc PACLIC"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP.2013.6638949"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP40776.2020.9053878"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP.2016.7472823"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.21437\/Interspeech.2020-1784"},{"article-title":"A simplified fully quantized trans-former for end-to-end speech recognition","year":"2019","author":"bie","key":"ref9"},{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP.2018.8462506"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-main.195"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.21437\/Interspeech.2018-1456"},{"key":"ref21","doi-asserted-by":"crossref","first-page":"13657","DOI":"10.1609\/aaai.v35i15.17610","article-title":"ALP-KD: Attention-based layer projection for knowledge distillation","volume":"35","author":"passban","year":"2021","journal-title":"Proceedings of the AAAI Conference on Artificial Intelligence"},{"article-title":"Adam: A method for stochastic optimization","year":"2014","author":"kingma","key":"ref24"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.21437\/Interspeech.2019-2680"},{"key":"ref25","first-page":"5998","article-title":"Attention is all you need","author":"vaswani","year":"2017","journal-title":"Proc NeurIPS"}],"event":{"name":"ICASSP 2022 - 2022 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)","start":{"date-parts":[[2022,5,23]]},"location":"Singapore, Singapore","end":{"date-parts":[[2022,5,27]]}},"container-title":["ICASSP 2022 - 2022 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/9745891\/9746004\/09746113.pdf?arnumber=9746113","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,9,23]],"date-time":"2024-09-23T03:23:56Z","timestamp":1727061836000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9746113\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2022,5,23]]},"references-count":25,"URL":"https:\/\/doi.org\/10.1109\/icassp43922.2022.9746113","relation":{},"subject":[],"published":{"date-parts":[[2022,5,23]]}}}