{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,12,12]],"date-time":"2025-12-12T13:45:26Z","timestamp":1765547126118,"version":"3.37.3"},"reference-count":43,"publisher":"Springer Science and Business Media LLC","issue":"S2","license":[{"start":{"date-parts":[[2023,8,5]],"date-time":"2023-08-05T00:00:00Z","timestamp":1691193600000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2023,8,5]],"date-time":"2023-08-05T00:00:00Z","timestamp":1691193600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"crossref","award":["No.61936008"],"award-info":[{"award-number":["No.61936008"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"crossref"}]}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["Artif Intell Rev"],"published-print":{"date-parts":[[2023,11]]},"DOI":"10.1007\/s10462-023-10566-5","type":"journal-article","created":{"date-parts":[[2023,8,5]],"date-time":"2023-08-05T02:01:53Z","timestamp":1691200913000},"page":"1897-1917","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":10,"title":["Deep neural network pruning method based on sensitive layers and reinforcement learning"],"prefix":"10.1007","volume":"56","author":[{"given":"Wenchuan","family":"Yang","sequence":"first","affiliation":[]},{"given":"Haoran","family":"Yu","sequence":"additional","affiliation":[]},{"given":"Baojiang","family":"Cui","sequence":"additional","affiliation":[]},{"given":"Runqi","family":"Sui","sequence":"additional","affiliation":[]},{"given":"Tianyu","family":"Gu","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2023,8,5]]},"reference":[{"key":"10566_CR1","unstructured":"Ashok A, Rhinehart N, Beainy F, Kitani KM (2017) N2n learning: network to network compression via policy gradient reinforcement learning. ArXiv preprint arXiv:1709.06030"},{"key":"10566_CR2","unstructured":"Brock A, Lim T, Ritchie JM, Weston N, Smash (2017). One-shot model architecture search through hypernetworks. ArXiv preprint arXiv:1708.05344"},{"key":"10566_CR3","unstructured":"Cai H, Chen T, Zhang W, Wang J (2017) Reinforcement learning for architecture search by network transformation. ArXiv preprint arXiv:1707.04873"},{"key":"10566_CR4","doi-asserted-by":"publisher","unstructured":"Carreira-Perpinan MA, Idelbayev Y (2018) Learning compression algorithms for neural net pruning. In: Computer vision and pattern recognition (CVPR). https:\/\/doi.org\/10.1109\/CVPR.2018.00890","DOI":"10.1109\/CVPR.2018.00890"},{"key":"10566_CR5","doi-asserted-by":"publisher","DOI":"10.5555\/3045118.3045361","author":"W Chen","year":"2015","unstructured":"Chen W, Wilson JT, Tyree S, Weinberger KQ, Chen Y (2015) Compressing neural networks with the hashing trick. Int Conf Mach Learn. https:\/\/doi.org\/10.5555\/3045118.3045361","journal-title":"Int Conf Mach Learn"},{"key":"10566_CR6","unstructured":"Chen T, Goodfellow I, Shlens J (2015b) Net2net: accelerating learning via knowledge transfer. ArXiv preprint arXiv:1511.05641"},{"key":"10566_CR8","doi-asserted-by":"crossref","unstructured":"Chollet F. Xception (2016) Deep learning with depthwise separable convolutions. ArXiv preprint arXiv:1610.02357","DOI":"10.1109\/CVPR.2017.195"},{"key":"10566_CR9","doi-asserted-by":"publisher","DOI":"10.1007\/s10462-020-09816-7","author":"T Choudhary","year":"2020","unstructured":"Choudhary T, Mishra V, Goswami A et al (2020) A comprehensive survey on model compression and acceleration. Artif Intell Rev. https:\/\/doi.org\/10.1007\/s10462-020-09816-7","journal-title":"Artif Intell Rev"},{"key":"10566_CR10","doi-asserted-by":"publisher","DOI":"10.1007\/s10462-022-10213-5","author":"S Cong","year":"2023","unstructured":"Cong S, Zhou Y (2023) A review of convolutional neural network architectures and their optimizations. Artif Intell Rev. https:\/\/doi.org\/10.1007\/s10462-022-10213-5","journal-title":"Artif Intell Rev"},{"key":"10566_CR11","doi-asserted-by":"publisher","unstructured":"Denton EL, Zaremba W, Bruna J, LeCun Y, Fergus R (2014) Exploiting linear structure within convolutional networks for efficient evaluation. In: Neural information processing systems (NeurIPS). https:\/\/doi.org\/10.5555\/2968826.2968968","DOI":"10.5555\/2968826.2968968"},{"key":"10566_CR12","unstructured":"Emmons S, Eysenbach B, Kostrikov I et al (2021) RvS: what is essential for offline RL via supervised learning? ArXiv preprint arxiv.org\/abs\/2112.10751"},{"key":"10566_CR13","unstructured":"Han S, Mao H, Dally WJ (2015a) Deep compression: compressing deep neural networks with pruning,trained quantization and huffman coding. ArXiv preprint arxiv.org\/abs\/1510.00149v5"},{"key":"10566_CR14","doi-asserted-by":"publisher","unstructured":"Han S, Pool J, Tran J, Dally WJ (2015b) Learning both weights and connections for efficient neural network. In: Neural information processing systems (NeurIPS). https:\/\/doi.org\/10.5555\/2969239.2969366","DOI":"10.5555\/2969239.2969366"},{"key":"10566_CR15","doi-asserted-by":"publisher","unstructured":"Han S, Liu X, Mao H, Pu J, Pedram A, Horowitz MA, Dally WJ (2016) Eie: efficient inference engine on compressed deep neural network. In: International conference on computer architecture (ISCA). https:\/\/doi.org\/10.1109\/isca.2016.30","DOI":"10.1109\/isca.2016.30"},{"key":"10566_CR16","doi-asserted-by":"publisher","unstructured":"He K, Zhang X, Ren S, Sun J (2016a) Deep residual learning for image recognition. In: Computer vision and pattern recognition (CVPR). pp. 770\u2013778. https:\/\/doi.org\/10.1109\/CVPR.2016.90","DOI":"10.1109\/CVPR.2016.90"},{"key":"10566_CR17","doi-asserted-by":"publisher","unstructured":"He K, Zhang X, Ren S, Sun J (2016b) Deep residual learning for image recognition. In: Computer vision and pattern recognition (CVPR). https:\/\/doi.org\/10.1109\/CVPR.2016.90","DOI":"10.1109\/CVPR.2016.90"},{"key":"10566_CR18","doi-asserted-by":"publisher","unstructured":"He Y, Kang G, Dong X, Fu Y, Yang Y (2018a) Soft filter pruning for accelerating deep convolutional neural networks. In: International joint conference on artificial intelligence (IJCAI). https:\/\/doi.org\/10.5555\/3304889.3304970","DOI":"10.5555\/3304889.3304970"},{"key":"10566_CR19","doi-asserted-by":"publisher","unstructured":"He Y, Lin J, Liu Z et al (2018b) Amc: Automl for model compression and acceleration on mobile devices. In: The European conference on computer vision (ECCV), pp. 784\u2013800. https:\/\/doi.org\/10.48550\/arXiv.1802.03494","DOI":"10.48550\/arXiv.1802.03494"},{"key":"10566_CR20","doi-asserted-by":"crossref","unstructured":"Huang Z, Wang N (2018) Data-driven sparse structure selection for deep neural networks. ArXiv preprint arXiv:1707.01213","DOI":"10.1007\/978-3-030-01270-0_19"},{"key":"10566_CR21","doi-asserted-by":"publisher","unstructured":"Huang G, Liu Z, Van Der Maaten L, Weinberger KQ (2017) Densely connected convolutional networks. In: computer vision and pattern recognition (CVPR). https:\/\/doi.org\/10.1109\/CVPR.2017.243","DOI":"10.1109\/CVPR.2017.243"},{"key":"10566_CR22","unstructured":"Krizhevsky A, Hinton G et al (2009) Learning multiple layers of features from tiny images. In: Technical report, Citeseer"},{"key":"10566_CR23","unstructured":"Li H, Kadav A, Durdanovic I, Samet H, Graf HP (2017) Pruning filters for efficient convnets. ArXiv preprint arXiv:1608.08710"},{"key":"10566_CR24","unstructured":"Lillicrap TP, Hunt JJ, Pritzel A, Heess N, Erez T, Tassa Y, Silver D, Wierstra D (2015) Continuous control with deep reinforcement learning. ArXiv preprint arXiv:1509.02971"},{"key":"10566_CR25","doi-asserted-by":"publisher","unstructured":"Lin S, Ji R, Yan C, Zhang B, Cao L, Ye Q, Huang F, Doermann D (2019) Towards optimal structured cnn pruning via generative adversarial learning. In: Computer vision and pattern recognition (CVPR). https:\/\/doi.org\/10.1109\/CVPR.2019.00290","DOI":"10.1109\/CVPR.2019.00290"},{"key":"10566_CR26","doi-asserted-by":"crossref","unstructured":"Lin M, Ji R, Wang Y, Zhang Y (2020a) HRank: filter pruning using high-rank feature map. ArXiv preprint arXiv:2002.10179v2","DOI":"10.1109\/CVPR42600.2020.00160"},{"key":"10566_CR27","doi-asserted-by":"crossref","unstructured":"Lin M, Ji R, Zhang Y, Zhang B, Wu Y, Tian Y (2020b) Channel pruning via automatic structure search. ArXiv preprint arXiv:2001.08565","DOI":"10.24963\/ijcai.2020\/94"},{"key":"10566_CR28","doi-asserted-by":"publisher","unstructured":"Liu N, Ma X, Xu Z, Wang Y, Tang J, Ye J (2019) AutoCompress: an automatic DNN structured pruning framework for ultra-high compression rates. In: AAAI Conference on artificial intelligence. https:\/\/doi.org\/10.1609\/aaai.v34i04.5924","DOI":"10.1609\/aaai.v34i04.5924"},{"key":"10566_CR29","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2021.3127689","author":"M Lopez-Martin","year":"2021","unstructured":"Lopez-Martin M, Sanchez-Esguevillas A, Arribas JI et al (2021) Network intrusion detection based on extended rbf neural network with offline reinforcement learning. IEEE Access. https:\/\/doi.org\/10.1109\/ACCESS.2021.3127689","journal-title":"IEEE Access"},{"key":"10566_CR30","doi-asserted-by":"crossref","unstructured":"Luo JH, Wu J, Lin W (2017) Thinet: a filter level pruning method for deep neural network compression. ArXiv preprint arXiv:1707.06342","DOI":"10.1109\/ICCV.2017.541"},{"key":"10566_CR31","unstructured":"Miikkulainen R, Liang J, Meyerson E, Rawal A, Fink D, Francon O, Raju B, Navruzyan A, Duffy N, Hodjat B (2017) Evolving deep neural networks. ArXiv preprint arXiv:1703.00548"},{"key":"10566_CR32","unstructured":"Park J, Li S, Wen W, Tang PTP, Li H, Chen Y, Dubey P (2016) Faster cnns with direct sparse convolutions and guided pruning. ArXiv preprint arXiv:1608.01409"},{"key":"10566_CR33","unstructured":"Paszke A, Gross S, Chintala S, Chanan G et al (2017) Automatic differentiation in pytorch. In: Neural information processing systems (NeurIPS)"},{"key":"10566_CR34","unstructured":"Real E, Moore S, Selle A, Saxena S, Suematsu YL, Le Q, Kurakin A (2017) Large-scale evolution of image classifiers. ArXiv preprint arXiv:1703.01041"},{"key":"10566_CR35","doi-asserted-by":"publisher","DOI":"10.1007\/s11263-015-0816-y","author":"O Russakovsky","year":"2015","unstructured":"Russakovsky O, Deng J, Su H, Krause J et al (2015) Imagenet large scale visual recognition challenge. Int J Comput Vis. https:\/\/doi.org\/10.1007\/s11263-015-0816-y","journal-title":"Int J Comput Vis"},{"key":"10566_CR36","unstructured":"Simonyan K, Zisserman A (2014) Very deep convolutional networks for large-scale image recognition. ArXiv preprint arXiv:1409.1556"},{"key":"10566_CR37","doi-asserted-by":"publisher","DOI":"10.1162\/106365602320169811","author":"KO Stanley","year":"2017","unstructured":"Stanley KO, Miikkulainen R (2017) Evolving neural networks through augmenting topologies. Evolut Comput. https:\/\/doi.org\/10.1162\/106365602320169811","journal-title":"Evolut Comput"},{"key":"10566_CR38","doi-asserted-by":"publisher","DOI":"10.1007\/s10462-022-10141-4","author":"W Su","year":"2022","unstructured":"Su W, Li L, Liu F et al (2022) AI on the edge: a comprehensive review. Artif Intell Rev. https:\/\/doi.org\/10.1007\/s10462-022-10141-4","journal-title":"Artif Intell Rev"},{"key":"10566_CR39","unstructured":"Suau X, Zappella L, Palakkode V, Apostoloff N (2018) Principal filter analysis for guided network compression. ArXiv preprint arXiv:1807.10585"},{"key":"10566_CR40","doi-asserted-by":"crossref","unstructured":"Vadera S, Ameen S (2021) Methods for pruning deep neural networks. ArXiv preprint arXiv:2011.00241v2","DOI":"10.1109\/ACCESS.2022.3182659"},{"key":"10566_CR41","doi-asserted-by":"publisher","DOI":"10.1109\/JSTSP.2020.2977090","author":"J Wang","year":"2020","unstructured":"Wang J, Bai H, Wu J, Cheng J (2020) Bayesian automatic model compression. IEEE J Sel Top Signal Process. https:\/\/doi.org\/10.1109\/JSTSP.2020.2977090","journal-title":"IEEE J Sel Top Signal Process"},{"key":"10566_CR42","unstructured":"Wang D, Zhou L, Zhang X, Bai X, Zhou J (2018) Exploring linear relationship in feature map subspace for convnets compression. ArXiv preprint arXiv:1803.05729"},{"key":"10566_CR44","unstructured":"Ye J, Lu X, Lin Z, Wang JZ (2018) Rethinking the smaller-norm-less-informative assumption in channel pruning of convolution layers. ArXiv preprint arXiv:1802.00124"},{"key":"10566_CR45","doi-asserted-by":"crossref","unstructured":"Zhan H, Lin WM, Cao Y (2021) Deep model compression via two-stage deep reinforcement learning. ArXiv preprint arXiv:1912.0225","DOI":"10.1007\/978-3-030-86486-6_15"}],"container-title":["Artificial Intelligence Review"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s10462-023-10566-5.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s10462-023-10566-5\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s10462-023-10566-5.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2023,11,13]],"date-time":"2023-11-13T19:06:31Z","timestamp":1699902391000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s10462-023-10566-5"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2023,8,5]]},"references-count":43,"journal-issue":{"issue":"S2","published-print":{"date-parts":[[2023,11]]}},"alternative-id":["10566"],"URL":"https:\/\/doi.org\/10.1007\/s10462-023-10566-5","relation":{},"ISSN":["0269-2821","1573-7462"],"issn-type":[{"type":"print","value":"0269-2821"},{"type":"electronic","value":"1573-7462"}],"subject":[],"published":{"date-parts":[[2023,8,5]]},"assertion":[{"value":"5 August 2023","order":1,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"All authors certify that they have no affiliations or involvement in any organization or entity with any financial interest or non-financial interest in the subject matter or materials discussed in this manuscript.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflict of interest"}}]}}