{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,14]],"date-time":"2026-04-14T16:01:07Z","timestamp":1776182467714,"version":"3.50.1"},"reference-count":34,"publisher":"Elsevier BV","license":[{"start":{"date-parts":[[2026,8,1]],"date-time":"2026-08-01T00:00:00Z","timestamp":1785542400000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/tdm\/userlicense\/1.0\/"},{"start":{"date-parts":[[2026,8,1]],"date-time":"2026-08-01T00:00:00Z","timestamp":1785542400000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/legal\/tdmrep-license"},{"start":{"date-parts":[[2026,8,1]],"date-time":"2026-08-01T00:00:00Z","timestamp":1785542400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-017"},{"start":{"date-parts":[[2026,8,1]],"date-time":"2026-08-01T00:00:00Z","timestamp":1785542400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"},{"start":{"date-parts":[[2026,8,1]],"date-time":"2026-08-01T00:00:00Z","timestamp":1785542400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-012"},{"start":{"date-parts":[[2026,8,1]],"date-time":"2026-08-01T00:00:00Z","timestamp":1785542400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2026,8,1]],"date-time":"2026-08-01T00:00:00Z","timestamp":1785542400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-004"}],"funder":[{"DOI":"10.13039\/501100015401","name":"Key Research and Development Projects of Shaanxi Province","doi-asserted-by":"publisher","award":["2025CY-YBXM-041"],"award-info":[{"award-number":["2025CY-YBXM-041"]}],"id":[{"id":"10.13039\/501100015401","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100015401","name":"Key Research and Development Projects of Shaanxi Province","doi-asserted-by":"publisher","award":["2025CY-YBXM-042"],"award-info":[{"award-number":["2025CY-YBXM-042"]}],"id":[{"id":"10.13039\/501100015401","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62506284"],"award-info":[{"award-number":["62506284"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62133012"],"award-info":[{"award-number":["62133012"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62303366"],"award-info":[{"award-number":["62303366"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["61936006"],"award-info":[{"award-number":["61936006"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":["elsevier.com","sciencedirect.com"],"crossmark-restriction":true},"short-container-title":["Neural 
Networks"],"published-print":{"date-parts":[[2026,8]]},"DOI":"10.1016\/j.neunet.2026.108804","type":"journal-article","created":{"date-parts":[[2026,3,16]],"date-time":"2026-03-16T07:29:29Z","timestamp":1773646169000},"page":"108804","update-policy":"https:\/\/doi.org\/10.1016\/elsevier_cm_policy","source":"Crossref","is-referenced-by-count":0,"special_numbering":"C","title":["SSA-KD: Self-structure-aware knowledge distillation for convolutional neural networks"],"prefix":"10.1016","volume":"200","author":[{"ORCID":"https:\/\/orcid.org\/0009-0006-1533-9957","authenticated-orcid":false,"given":"Yiheng","family":"Lu","sequence":"first","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0009-0004-6468-7873","authenticated-orcid":false,"given":"Zhihui","family":"Zhang","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0003-2413-4698","authenticated-orcid":false,"given":"Ziyu","family":"Guan","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0003-4455-6521","authenticated-orcid":false,"given":"Wei","family":"Zhao","sequence":"additional","affiliation":[]},{"given":"Yaming","family":"Yang","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0002-7191-7348","authenticated-orcid":false,"given":"Cai","family":"Xu","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0002-0415-8556","authenticated-orcid":false,"given":"Maoguo","family":"Gong","sequence":"additional","affiliation":[]}],"member":"78","reference":[{"key":"10.1016\/j.neunet.2026.108804_bib0001","series-title":"Proceedings of the IEEE\/CVF conference on computer vision and pattern recognition","first-page":"3191","article-title":"Combining weight pruning and knowledge distillation for cnn compression","author":"Aghli","year":"2021"},{"key":"10.1016\/j.neunet.2026.108804_bib0002","first-page":"7787","article-title":"Evoprompting: Language models for code-level neural architecture search","volume":"36","author":"Chen","year":"2024","journal-title":"Advances in Neural Information Processing Systems(NIPS)"},{"key":"10.1016\/j.neunet.2026.108804_bib0003","series-title":"Proceedings of the AAAI conference on artificial intelligence(AAAI)","first-page":"7028","article-title":"Cross-layer distillation with semantic calibration","volume":"vol. 
35","author":"Chen","year":"2021"},{"key":"10.1016\/j.neunet.2026.108804_bib0004","doi-asserted-by":"crossref","first-page":"615","DOI":"10.1016\/j.neunet.2023.08.040","article-title":"Hierarchical knowledge propagation and distillation for few-shot learning","volume":"167","author":"Chunpeng","year":"2023","journal-title":"Neural Networks"},{"issue":"01","key":"10.1016\/j.neunet.2026.108804_bib0005","first-page":"1","article-title":"Distilled gradual pruning with pruned fine-tuning","volume":"1","author":"Fontana","year":"2024","journal-title":"IEEE Transactions on Artificial Intelligence"},{"key":"10.1016\/j.neunet.2026.108804_bib0006","doi-asserted-by":"crossref","first-page":"154","DOI":"10.1016\/j.neucom.2020.10.113","article-title":"Residual error based knowledge distillation","volume":"433","author":"Gao","year":"2021","journal-title":"Neurocomputing"},{"issue":"5","key":"10.1016\/j.neunet.2026.108804_bib0007","doi-asserted-by":"crossref","first-page":"7099","DOI":"10.1109\/TII.2022.3209672","article-title":"Multilevel attention-based sample correlations for knowledge distillation","volume":"19","author":"Gou","year":"2023","journal-title":"IEEE Transactions on Industrial Informatics"},{"key":"10.1016\/j.neunet.2026.108804_bib0008","first-page":"79570","article-title":"One-for-all: Bridge the gap between heterogeneous architectures in knowledge distillation","volume":"36","author":"Hao","year":"2024","journal-title":"Advances in Neural Information Processing Systems(NIPS)"},{"key":"10.1016\/j.neunet.2026.108804_bib0009","unstructured":"Hinton, G., Vinyals, O., & Dean, J. (2015). Distilling the knowledge in a neural network. arXiv: 1503.02531."},{"key":"10.1016\/j.neunet.2026.108804_bib0010","unstructured":"Howard, A. G., Zhu, M., Chen, B., Kalenichenko, D., Wang, W., Weyand, T., Andreetto, M., & Adam, H. (2017). MobileNets: Efficient convolutional neural networks for mobile vision applications. arXiv: 1704.04861."},{"key":"10.1016\/j.neunet.2026.108804_bib0011","article-title":"Knowledge distillation under ideal joint classifier assumption","volume":"173","author":"Huayu","year":"2024","journal-title":"Neural Networks"},{"key":"10.1016\/j.neunet.2026.108804_bib0012","article-title":"AdaDFKD: Exploring adaptive inter-sample relationship in data-free knowledge distillation","volume":"177","author":"Jingru","year":"2024","journal-title":"Neural Networks"},{"key":"10.1016\/j.neunet.2026.108804_bib0013","first-page":"598","article-title":"Optimal brain damage","volume":"2","author":"LeCun","year":"1989","journal-title":"Advances in Neural Information Processing Systems(NIPS)"},{"key":"10.1016\/j.neunet.2026.108804_bib0014","series-title":"Proceedings of the IEEE\/CVF conference on computer vision and pattern recognition(CVPR)","first-page":"1989","article-title":"Block-wisely supervised neural architecture search with knowledge distillation","author":"Li","year":"2020"},{"key":"10.1016\/j.neunet.2026.108804_bib0015","unstructured":"Li, H., Kadav, A., Durdanovic, I., Samet, H., & Graf, H. P. (2016). Pruning filters for efficient convnets. arXiv: 1608.08710."},{"key":"10.1016\/j.neunet.2026.108804_bib0016","series-title":"Proceedings of the AAAI conference on artificial intelligence(AAAI)","first-page":"1504","article-title":"Curriculum temperature for knowledge distillation","volume":"vol. 
37","author":"Li","year":"2023"},{"key":"10.1016\/j.neunet.2026.108804_bib0017","series-title":"Proceedings of the IEEE\/CVF conference on computer vision and pattern recognition(CVPR)","first-page":"7539","article-title":"Search to distill: Pearls are everywhere but not the eyes","author":"Liu","year":"2020"},{"key":"10.1016\/j.neunet.2026.108804_bib0018","series-title":"Proceedings of the AAAI conference on artificial intelligence(AAAI)","first-page":"3918","article-title":"Entropy induced pruning framework for convolutional neural networks","volume":"vol. 38","author":"Lu","year":"2024"},{"key":"10.1016\/j.neunet.2026.108804_bib0019","series-title":"ICASSP 2021-2021 IEEE international conference on acoustics, speech and signal processing (icassp)","first-page":"7608","article-title":"Towards practical lipreading with distilled and efficient models","author":"Ma","year":"2021"},{"key":"10.1016\/j.neunet.2026.108804_bib0020","series-title":"Proceedings of the AAAI conference on artificial intelligence(AAAI)","first-page":"4233","article-title":"Understanding the role of the projector in knowledge distillation","volume":"vol. 38","author":"Miles","year":"2024"},{"key":"10.1016\/j.neunet.2026.108804_bib0021","series-title":"Proceedings of the AAAI conference on artificial intelligence(AAAI)","first-page":"5191","article-title":"Improved knowledge distillation via teacher assistant","volume":"vol. 34","author":"Mirzadeh","year":"2020"},{"key":"10.1016\/j.neunet.2026.108804_bib0022","unstructured":"Molchanov, P., Tyree, S., Karras, T., Aila, T., & Kautz, J. (2016). Pruning convolutional neural networks for resource efficient inference. arXiv: 1611.06440."},{"key":"10.1016\/j.neunet.2026.108804_bib0023","series-title":"European conference on computer vision(ECCV)","first-page":"120","article-title":"Prune your model before distill it","author":"Park","year":"2022"},{"issue":"8","key":"10.1016\/j.neunet.2026.108804_bib0024","doi-asserted-by":"crossref","first-page":"9060","DOI":"10.1109\/TII.2022.3224969","article-title":"A lightweight and adaptive knowledge distillation framework for remaining useful life prediction","volume":"19","author":"Ren","year":"2023","journal-title":"IEEE Transactions on Industrial Informatics"},{"key":"10.1016\/j.neunet.2026.108804_bib0025","series-title":"In international conference on learning representations(ICLR)","article-title":"Polytechnique montr\u00e9al, y. bengio, universit\u00e9 de montr\u00e9al, adriana romero, nicolas ballas, samira ebrahimi kahou, antoine chassang, carlo gatta, and yoshua bengio. 
fitnets: Hints for thin deep nets","author":"Romero","year":"2015"},{"key":"10.1016\/j.neunet.2026.108804_bib0026","article-title":"Data-free knowledge distillation via generator-free data generation for non-IID federated learning","volume":"179","author":"Siran","year":"2024","journal-title":"Neural Networks"},{"key":"10.1016\/j.neunet.2026.108804_bib0027","series-title":"Proceedings of the IEEE\/CVF international conference on computer vision(ICCV)","first-page":"1365","article-title":"Similarity-preserving knowledge distillation","author":"Tung","year":"2019"},{"key":"10.1016\/j.neunet.2026.108804_bib0028","doi-asserted-by":"crossref","DOI":"10.1016\/j.patcog.2022.109193","article-title":"FP-darts: Fast parallel differentiable neural architecture search for image classification","volume":"136","author":"Wang","year":"2023","journal-title":"Pattern Recognition"},{"key":"10.1016\/j.neunet.2026.108804_bib0029","series-title":"European conference on computer vision(ECCV)","first-page":"588","article-title":"Knowledge distillation meets self-supervision","author":"Xu","year":"2020"},{"key":"10.1016\/j.neunet.2026.108804_bib0030","series-title":"Proceedings of the IEEE\/CVF international conference on computer vision(ICCV)","first-page":"17185","article-title":"From knowledge distillation to self-knowledge distillation: A unified approach with normalized loss and customized soft labels","author":"Yang","year":"2023"},{"key":"10.1016\/j.neunet.2026.108804_bib0031","article-title":"Decoupled graph knowledge distillation: A general logits-based method for learning MLPs on graphs","volume":"179","author":"Yingjie","year":"2024","journal-title":"Neural Networks"},{"key":"10.1016\/j.neunet.2026.108804_bib0032","unstructured":"Zagoruyko, S., & Komodakis, N. (2016). Paying more attention to attention: Improving the performance of convolutional neural networks via attention transfer. 
arXiv: 1612.03928."},{"key":"10.1016\/j.neunet.2026.108804_bib0033","series-title":"Proceedings of the IEEE\/CVF international conference on computer vision(ICCV)","first-page":"3713","article-title":"Be your own teacher: Improve the performance of convolutional neural networks via self distillation","author":"Zhang","year":"2019"},{"key":"10.1016\/j.neunet.2026.108804_bib0034","series-title":"Proceedings of the IEEE conference on computer vision and pattern recognition(CVPR)","first-page":"6848","article-title":"ShuffleNet: An extremely efficient convolutional neural network for mobile devices","author":"Zhang","year":"2018"}],"container-title":["Neural Networks"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0893608026002662?httpAccept=text\/xml","content-type":"text\/xml","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0893608026002662?httpAccept=text\/plain","content-type":"text\/plain","content-version":"vor","intended-application":"text-mining"}],"deposited":{"date-parts":[[2026,4,14]],"date-time":"2026-04-14T14:59:20Z","timestamp":1776178760000},"score":1,"resource":{"primary":{"URL":"https:\/\/linkinghub.elsevier.com\/retrieve\/pii\/S0893608026002662"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2026,8]]},"references-count":34,"alternative-id":["S0893608026002662"],"URL":"https:\/\/doi.org\/10.1016\/j.neunet.2026.108804","relation":{},"ISSN":["0893-6080"],"issn-type":[{"value":"0893-6080","type":"print"}],"subject":[],"published":{"date-parts":[[2026,8]]},"assertion":[{"value":"Elsevier","name":"publisher","label":"This article is maintained by"},{"value":"SSA-KD: Self-structure-aware knowledge distillation for convolutional neural networks","name":"articletitle","label":"Article Title"},{"value":"Neural Networks","name":"journaltitle","label":"Journal Title"},{"value":"https:\/\/doi.org\/10.1016\/j.neunet.2026.108804","name":"articlelink","label":"CrossRef DOI link to publisher maintained version"},{"value":"article","name":"content_type","label":"Content Type"},{"value":"\u00a9 2026 Published by Elsevier Ltd.","name":"copyright","label":"Copyright"}],"article-number":"108804"}}
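
The record above has the shape of a Crossref REST API "work" message (note the `message-type: work` envelope and fields such as `title`, `container-title`, `funder`, and `reference`). As a minimal sketch of how such a record can be retrieved and read, the snippet below queries the public Crossref endpoint `https://api.crossref.org/works/{doi}` for this article's DOI using only the Python standard library; the field names follow the dump above, but network access and the endpoint's availability are assumptions, not something stated in the record itself.

```python
# Minimal sketch: fetch a Crossref "work" record like the one above and read a
# few common fields. Assumes the public Crossref REST API at
# https://api.crossref.org/works/{doi} and network access.
import json
import urllib.request

DOI = "10.1016/j.neunet.2026.108804"
url = f"https://api.crossref.org/works/{DOI}"

with urllib.request.urlopen(url, timeout=30) as resp:
    record = json.load(resp)

work = record["message"]                    # the work payload, as in the dump above
print(work["title"][0])                     # article title
print(work["container-title"][0])           # journal: "Neural Networks"
print(work["DOI"], work.get("volume"), work.get("page"))
print("references:", work.get("references-count"))

# Funders and references are lists of dicts; e.g. award numbers per funder:
for funder in work.get("funder", []):
    print(funder.get("name"), funder.get("award", []))
```

A typical use of such a script is verifying that deposited metadata (volume, page/article number, funder award numbers, reference count) matches the published version of record before citing or indexing it.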