{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,12,19]],"date-time":"2025-12-19T15:46:52Z","timestamp":1766159212772},"reference-count":35,"publisher":"Springer Science and Business Media LLC","issue":"8","license":[{"start":{"date-parts":[[2023,6,20]],"date-time":"2023-06-20T00:00:00Z","timestamp":1687219200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2023,6,20]],"date-time":"2023-06-20T00:00:00Z","timestamp":1687219200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["Neural Process Lett"],"published-print":{"date-parts":[[2023,12]]},"DOI":"10.1007\/s11063-023-11328-8","type":"journal-article","created":{"date-parts":[[2023,6,20]],"date-time":"2023-06-20T13:02:23Z","timestamp":1687266143000},"page":"10315-10329","update-policy":"http:\/\/dx.doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":4,"title":["Adversarial-Based Ensemble Feature Knowledge Distillation"],"prefix":"10.1007","volume":"55","author":[{"given":"Mingwen","family":"Shao","sequence":"first","affiliation":[]},{"given":"Shunhang","family":"Li","sequence":"additional","affiliation":[]},{"given":"Zilu","family":"Peng","sequence":"additional","affiliation":[]},{"given":"Yuantao","family":"Sun","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2023,6,20]]},"reference":[{"key":"11328_CR1","doi-asserted-by":"crossref","unstructured":"Chen D, Mei J, Zhang H, Wang C, Feng Y, Chen C (2022) Knowledge distillation with the reused teacher classifier. ArXiv:2203.14001","DOI":"10.1109\/CVPR52688.2022.01163"},{"key":"11328_CR2","doi-asserted-by":"crossref","unstructured":"Chen P, Liu S, Zhao H, Jia J (2021) Distilling knowledge via knowledge review. In: 2021 IEEE\/CVF conference on computer vision and pattern recognition (CVPR), pp 5006\u20135015","DOI":"10.1109\/CVPR46437.2021.00497"},{"key":"11328_CR3","unstructured":"Chung I, Park S, Kim J, Kwak N (2020) Feature-map-level online adversarial knowledge distillation. In: ICML"},{"key":"11328_CR4","doi-asserted-by":"crossref","unstructured":"Cui J, Chen P, Li R, Liu S, Shen X, Jia J (2019) Fast and practical neural architecture search. In: Proceedings of the IEEE\/CVF international conference on computer vision, pp 6509\u20136518","DOI":"10.1109\/ICCV.2019.00661"},{"key":"11328_CR5","unstructured":"Goodfellow IJ, Pouget-Abadie J, Mirza M, Xu B, Warde-Farley D, Ozair S, Courville AC, Bengio Y (2014) Generative adversarial nets. In: NIPS"},{"key":"11328_CR6","unstructured":"Hinton G, Vinyals O, Dean J et\u00a0al. (2015) Distilling the knowledge in a neural network. Preprint at arxiv:1503.02531v1"},{"key":"11328_CR7","unstructured":"Howard AG, Zhu M, Chen B, Kalenichenko D, Wang W, Weyand T, Andreetto M, Adam H (2017) Mobilenets: efficient convolutional neural networks for mobile vision applications. Preprint at arxiv:1704.04861"},{"key":"11328_CR8","doi-asserted-by":"crossref","unstructured":"Jiang N, Tang J, Yu W (2022) Positive-unlabeled learning for knowledge distillation. Neural Process Lett","DOI":"10.1007\/s11063-022-11038-7"},{"key":"11328_CR9","doi-asserted-by":"crossref","unstructured":"Jiang X, Deng X (2022) Knowledge reverse distillation based confidence calibration for deep neural networks. Neural Process Lett","DOI":"10.1007\/s11063-022-10885-8"},{"key":"11328_CR10","unstructured":"Jolicoeur-Martineau A (2018) The relativistic discriminator: a key element missing from standard gan. arXiv preprint arXiv:1807.00734"},{"key":"11328_CR11","doi-asserted-by":"crossref","unstructured":"Kim J, Hyun M, Chung I, Kwak N (2021) Feature fusion for online mutual knowledge distillation. In: 2020 25th international conference on pattern recognition (ICPR). IEEE, pp 4619\u20134625","DOI":"10.1109\/ICPR48806.2021.9412615"},{"key":"11328_CR12","unstructured":"Kim J, Park SU, Kwak N (2018) Paraphrasing complex network: network compression via factor transfer. Adv Neural Inf Process Syst 31"},{"key":"11328_CR13","unstructured":"Kingma DP, Ba J (2014) Adam: a method for stochastic optimization. CoRR, abs\/1412.6980"},{"key":"11328_CR14","unstructured":"Komodakis N, Zagoruyko S (2017) Paying more attention to attention: improving the performance of convolutional neural networks via attention transfer. In: ICLR"},{"key":"11328_CR15","unstructured":"Li H, Kadav A, Durdanovic I, Samet H, Graf HP (2016) Pruning filters for efficient convnets. Preprint at arxiv:1608.08710"},{"key":"11328_CR16","doi-asserted-by":"crossref","unstructured":"Liang J, Li L, Bing Z, Zhao B, Tang Y, Lin B, Fan H (2022) Efficient one pass self-distillation with zipf\u2019s label smoothing. In: European conference on computer vision","DOI":"10.1007\/978-3-031-20083-0_7"},{"key":"11328_CR17","doi-asserted-by":"crossref","unstructured":"Luo J-H, Wu J, Lin W (2017) Thinet: a filter level pruning method for deep neural network compression. In: Proceedings of the IEEE international conference on computer vision, pp 5058\u20135066","DOI":"10.1109\/ICCV.2017.541"},{"key":"11328_CR18","unstructured":"Park SU, Kwak N (2019) Feed: feature-level ensemble for knowledge distillation. Preprint at arxiv:1909.10754"},{"key":"#cr-split#-11328_CR19.1","unstructured":"Qian B, Wang Y, Yin H, Hong R, Wang M (2022) Switchable online knowledge distillation. In: Avidan S, Brostow G, Ciss\u00e9 M, Farinella GM, Hassner T"},{"key":"#cr-split#-11328_CR19.2","unstructured":"(ed) Computer Vision-ECCV, vol 2022, pp 449-466"},{"key":"11328_CR20","doi-asserted-by":"crossref","unstructured":"Rastegari M, Ordonez V, Redmon J, Farhadi A (2016) Xnor-net: imagenet classification using binary convolutional neural networks. In: European conference on computer vision. Springer, pp 525\u2013542","DOI":"10.1007\/978-3-319-46493-0_32"},{"key":"11328_CR21","unstructured":"Romero A, Ballas N, Kahou SE, Chassang A, Gatta C, Bengio Y (2015) Fitnets: hints for thin deep nets. Preprint at arxiv:1412.6550"},{"key":"11328_CR22","doi-asserted-by":"crossref","unstructured":"Sandler M, Howard A, Zhu M, Zhmoginov A, Chen L-C (2018) Mobilenetv2: inverted residuals and linear bottlenecks. In: Proceedings of the IEEE conference on computer vision and pattern recognition, pp 4510\u20134520","DOI":"10.1109\/CVPR.2018.00474"},{"key":"11328_CR23","doi-asserted-by":"crossref","unstructured":"Selvaraju RR, Das A, Vedantam R, Cogswell M, Parikh D, Batra D (2017) Grad-cam: visual explanations from deep networks via gradient-based localization. Int J Comput Vis 128:336\u2013359","DOI":"10.1007\/s11263-019-01228-7"},{"key":"11328_CR24","doi-asserted-by":"crossref","unstructured":"Shen Z, He Z, Xue X (2019) Meal: multi-model ensemble via adversarial learning. In: Proceedings of the AAAI conference on artificial intelligence, vol 33, pp 4886\u20134893","DOI":"10.1609\/aaai.v33i01.33014886"},{"key":"11328_CR25","unstructured":"Tian Y, Krishnan D, Isola P (2019) Contrastive representation distillation. Preprint at arxiv:1910.10699"},{"key":"11328_CR26","doi-asserted-by":"crossref","unstructured":"Yao A, Sun D (2020) Knowledge transfer via dense cross-layer mutual-distillation. In: European conference on computer vision. Springer, pp 294\u2013311","DOI":"10.1007\/978-3-030-58555-6_18"},{"key":"11328_CR27","doi-asserted-by":"crossref","unstructured":"Yim J, Joo D, Bae J, Kim J (2017) A gift from knowledge distillation: fast optimization, network minimization and transfer learning. In: Proceedings of the IEEE conference on computer vision and pattern recognition, pp 4133\u20134141","DOI":"10.1109\/CVPR.2017.754"},{"key":"11328_CR28","first-page":"11","volume":"27","author":"J Yosinski","year":"2014","unstructured":"Yosinski J, Clune J, Bengio Y, Lipson H (2014) How transferable are features in deep neural networks? Adv Neural Inf Process Syst 27:11","journal-title":"Adv Neural Inf Process Syst"},{"key":"11328_CR29","doi-asserted-by":"crossref","unstructured":"Yuan L, Tay FEH, Li G, Wang T, Feng J (2020) Revisiting knowledge distillation via label smoothing regularization. In: Proceedings of the IEEE\/CVF conference on computer vision and pattern recognition, pp 3903\u20133911","DOI":"10.1109\/CVPR42600.2020.00396"},{"key":"11328_CR30","doi-asserted-by":"crossref","unstructured":"Zhang X, Lu S, Gong H, Luo Z, Liu M (2020) Amln: adversarial-based mutual learning network for online knowledge distillation. In: ECCV","DOI":"10.1007\/978-3-030-58610-2_10"},{"key":"11328_CR31","doi-asserted-by":"crossref","unstructured":"Zhang Y, Xiang T, Hospedales TM, Lu H (2018) Deep mutual learning. In: Proceedings of the IEEE conference on computer vision and pattern recognition, pp 4320\u20134328","DOI":"10.1109\/CVPR.2018.00454"},{"key":"11328_CR32","doi-asserted-by":"crossref","unstructured":"Zhao B, Cui Q, Song R, Qiu Y, Liang J (2022) Decoupled knowledge distillation. In: 2022 IEEE\/CVF conference on computer vision and pattern recognition (CVPR), pp 11943\u201311952","DOI":"10.1109\/CVPR52688.2022.01165"},{"key":"11328_CR33","doi-asserted-by":"crossref","unstructured":"Zhu J, Tang S, Chen D, Yu S, Liu Y, Rong M, Yang A, Wang X (2021) Complementary relation contrastive distillation. In: Proceedings of the IEEE\/CVF conference on computer vision and pattern recognition, pp 9260\u20139269","DOI":"10.1109\/CVPR46437.2021.00914"},{"key":"11328_CR34","unstructured":"Zhu X, Gong S et\u00a0al. (2018) Knowledge distillation by on-the-fly native ensemble. Adv Neural Inf Process Syst 31"}],"container-title":["Neural Processing Letters"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11063-023-11328-8.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s11063-023-11328-8\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11063-023-11328-8.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2023,11,22]],"date-time":"2023-11-22T05:00:31Z","timestamp":1700629231000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s11063-023-11328-8"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2023,6,20]]},"references-count":35,"journal-issue":{"issue":"8","published-print":{"date-parts":[[2023,12]]}},"alternative-id":["11328"],"URL":"https:\/\/doi.org\/10.1007\/s11063-023-11328-8","relation":{},"ISSN":["1370-4621","1573-773X"],"issn-type":[{"value":"1370-4621","type":"print"},{"value":"1573-773X","type":"electronic"}],"subject":[],"published":{"date-parts":[[2023,6,20]]},"assertion":[{"value":"7 June 2023","order":1,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"20 June 2023","order":2,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"The authors have no competing interests to declare that are relevant to the content of this article.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflict of interest"}}]}}