{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,2,21]],"date-time":"2025-02-21T01:07:00Z","timestamp":1740100020234,"version":"3.37.3"},"reference-count":17,"publisher":"IEEE","license":[{"start":{"date-parts":[[2021,6,6]],"date-time":"2021-06-06T00:00:00Z","timestamp":1622937600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2021,6,6]],"date-time":"2021-06-06T00:00:00Z","timestamp":1622937600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2021,6,6]],"date-time":"2021-06-06T00:00:00Z","timestamp":1622937600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100003347","name":"Fudan University","doi-asserted-by":"publisher","id":[{"id":"10.13039\/501100003347","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2021,6,6]]},"DOI":"10.1109\/icassp39728.2021.9415015","type":"proceedings-article","created":{"date-parts":[[2021,5,13]],"date-time":"2021-05-13T19:53:45Z","timestamp":1620935625000},"page":"1675-1679","source":"Crossref","is-referenced-by-count":2,"title":["Adaptable Ensemble Distillation"],"prefix":"10.1109","author":[{"given":"Yankai","family":"Wang","sequence":"first","affiliation":[]},{"given":"Dawei","family":"Yang","sequence":"additional","affiliation":[]},{"given":"Wei","family":"Zhang","sequence":"additional","affiliation":[]},{"given":"Zhe","family":"Jiang","sequence":"additional","affiliation":[]},{"given":"Wenqiang","family":"Zhang","sequence":"additional","affiliation":[]}],"member":"263","reference":[{"key":"ref10","first-page":"7517","article-title":"Knowledge distillation by on-the-fly native ensemble","author":"zhu","year":"2018","journal-title":"Advances in neural information processing systems"},{"doi-asserted-by":"publisher","key":"ref11","DOI":"10.1109\/CVPR.2018.00454"},{"key":"ref12","first-page":"1832","article-title":"Collaborative learning for deep neural networks","author":"song","year":"2018","journal-title":"Advances in neural information processing systems"},{"year":"2020","author":"chung","article-title":"Feature-map-level online adversarial knowledge distillation","key":"ref13"},{"year":"2019","author":"chen","article-title":"Online knowledge distillation with diverse peers","key":"ref14"},{"year":"2020","author":"tang","article-title":"Understanding and improving knowledge distillation","key":"ref15"},{"doi-asserted-by":"publisher","key":"ref16","DOI":"10.1007\/s11263-015-0816-y"},{"year":"2016","author":"iandola","article-title":"Squeezenet: Alexnet-level accuracy with 50x fewer parameters and< 0.5 mb model size","key":"ref17"},{"year":"2015","author":"han","article-title":"Deep compression: Compressing deep neural networks with pruning, trained quantization and huffman coding","key":"ref4"},{"key":"ref3","first-page":"4700","article-title":"Densely connected convolutional networks","author":"huang","year":"2017","journal-title":"Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition"},{"year":"2015","author":"hinton","article-title":"Distilling the knowledge in a neural network","key":"ref6"},{"year":"2016","author":"li","article-title":"Pruning filters for efficient con-vnets","key":"ref5"},{"doi-asserted-by":"publisher","key":"ref8","DOI":"10.1109\/CVPR.2017.754"},{"year":"2014","author":"romero","article-title":"Fitnets: Hints for thin deep nets","key":"ref7"},{"doi-asserted-by":"publisher","key":"ref2","DOI":"10.1109\/CVPR.2016.90"},{"year":"2014","author":"simonyan","article-title":"Very deep convolutional networks for large-scale image recognition","key":"ref1"},{"year":"2018","author":"anil","article-title":"Large scale distributed neural network training through online distillation","key":"ref9"}],"event":{"name":"ICASSP 2021 - 2021 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)","start":{"date-parts":[[2021,6,6]]},"location":"Toronto, ON, Canada","end":{"date-parts":[[2021,6,11]]}},"container-title":["ICASSP 2021 - 2021 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/9413349\/9413350\/09415015.pdf?arnumber=9415015","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,5,10]],"date-time":"2022-05-10T15:40:48Z","timestamp":1652197248000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9415015\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021,6,6]]},"references-count":17,"URL":"https:\/\/doi.org\/10.1109\/icassp39728.2021.9415015","relation":{},"subject":[],"published":{"date-parts":[[2021,6,6]]}}}