{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,12,31]],"date-time":"2025-12-31T12:21:33Z","timestamp":1767183693244},"reference-count":41,"publisher":"Springer Science and Business Media LLC","issue":"13","license":[{"start":{"date-parts":[[2024,5,19]],"date-time":"2024-05-19T00:00:00Z","timestamp":1716076800000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2024,5,19]],"date-time":"2024-05-19T00:00:00Z","timestamp":1716076800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"funder":[{"name":"Strategic Priority Research Program of Chinese Academy of Sciences","award":["Grant No. XDA19020102","Grant No. XDA19020102","Grant No. XDA19020102","Grant No. XDA19020102","Grant No. XDA19020102"],"award-info":[{"award-number":["Grant No. XDA19020102","Grant No. XDA19020102","Grant No. XDA19020102","Grant No. XDA19020102","Grant No. 
XDA19020102"]}]}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["J Supercomput"],"published-print":{"date-parts":[[2024,9]]},"DOI":"10.1007\/s11227-024-06162-1","type":"journal-article","created":{"date-parts":[[2024,5,19]],"date-time":"2024-05-19T10:01:24Z","timestamp":1716112884000},"page":"18600-18626","update-policy":"http:\/\/dx.doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":6,"title":["Data-free adaptive structured pruning for federated learning"],"prefix":"10.1007","volume":"80","author":[{"given":"Wei","family":"Fan","sequence":"first","affiliation":[]},{"given":"Keke","family":"Yang","sequence":"additional","affiliation":[]},{"given":"Yifan","family":"Wang","sequence":"additional","affiliation":[]},{"given":"Cong","family":"Chen","sequence":"additional","affiliation":[]},{"given":"Jing","family":"Li","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2024,5,19]]},"reference":[{"issue":"10","key":"6162_CR1","doi-asserted-by":"publisher","first-page":"7110","DOI":"10.1109\/JIOT.2021.3074382","volume":"9","author":"X Wang","year":"2021","unstructured":"Wang X, Garg S, Lin H, Hu J, Kaddoum G, Piran MJ, Hossain MS (2021) Toward accurate anomaly detection in industrial internet of things using hierarchical federated learning. IEEE Int Things J 9(10):7110\u20137119","journal-title":"IEEE Int Things J"},{"key":"6162_CR2","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1007\/s41666-020-00082-4","volume":"5","author":"J Xu","year":"2021","unstructured":"Xu J, Glicksberg BS, Su C, Walker P, Bian J, Wang F (2021) Federated learning for healthcare informatics. 
J Healthc Inform Res 5:1\u201319","journal-title":"J Healthc Inform Res"},{"key":"6162_CR3","doi-asserted-by":"crossref","unstructured":"Liu Q, Chen C, Qin J, Dou Q, Heng P-A (2021) Feddg: federated domain generalization on medical image segmentation via episodic learning in continuous frequency space. In: Proceedings of the IEEE\/CVF Conference on Computer Vision and Pattern Recognition, pp. 1013\u20131023","DOI":"10.1109\/CVPR46437.2021.00107"},{"key":"6162_CR4","volume":"33","author":"L Barbieri","year":"2022","unstructured":"Barbieri L, Savazzi S, Brambilla M, Nicoli M (2022) Decentralized federated learning for extended sensing in 6g connected vehicles. Veh Commun 33:100396","journal-title":"Veh Commun"},{"key":"6162_CR5","doi-asserted-by":"crossref","unstructured":"Liang X, Liu Y, Chen T, Liu M, Yang Q (2022) Federated transfer reinforcement learning for autonomous driving. In: Federated and Transfer Learning, pp. 357\u2013371","DOI":"10.1007\/978-3-031-11748-0_15"},{"key":"6162_CR6","doi-asserted-by":"crossref","unstructured":"Yarradoddi S, Gadekallu TR (2022) Federated learning role in big data, IoT services and applications security, privacy and trust in IoT a survey. In: Trust, Security and Privacy for Big Data, pp. 28\u201349","DOI":"10.1201\/9781003194538-2"},{"key":"6162_CR7","unstructured":"McMahan B, Moore E, Ramage D, Hampson S, Arcas BA (2017) Communication-efficient learning of deep networks from decentralized data. In: Artificial Intelligence and Statistics, pp. 1273\u20131282. PMLR"},{"key":"6162_CR8","doi-asserted-by":"publisher","first-page":"10374","DOI":"10.1109\/TNNLS.2022.3166101","volume":"12","author":"Y Jiang","year":"2022","unstructured":"Jiang Y, Wang S, Valls V, Ko BJ, Lee W-H, Leung KK, Tassiulas L (2022) Model pruning enables efficient federated learning on edge devices. IEEE Trans Neural Netw Learn Syst 12:10374\u201310386. 
https:\/\/doi.org\/10.1109\/TNNLS.2022.3166101","journal-title":"IEEE Trans Neural Netw Learn Syst"},{"key":"6162_CR9","doi-asserted-by":"crossref","unstructured":"Bibikar S, Vikalo H, Wang Z, Chen X (2022) Federated dynamic sparse training: Computing less, communicating less, yet learning better. In: Proceedings of the AAAI Conference on Artificial Intelligence, pp. 6080\u20136088","DOI":"10.1609\/aaai.v36i6.20555"},{"key":"6162_CR10","unstructured":"Li A, Sun J, Wang B, Duan L, Li S, Chen Y, Li H (2020) Lotteryfl: Personalized and communication-efficient federated learning with lottery ticket hypothesis on non-iid datasets. arXiv preprint arXiv:2008.03371"},{"key":"6162_CR11","unstructured":"Qiu X, Fernandez-Marques J, Gusmao PP, Gao Y, Parcollet T, Lane ND (2022) Zerofl: efficient on-device training for federated learning with local sparsity. arXiv preprint arXiv:2208.02507"},{"key":"6162_CR12","unstructured":"Diao E, Ding J, Tarokh V (2020) Heterofl: computation and communication efficient federated learning for heterogeneous clients. arXiv preprint arXiv:2010.01264"},{"key":"6162_CR13","first-page":"12876","volume":"34","author":"S Horvath","year":"2021","unstructured":"Horvath S, Laskaridis S, Almeida M, Leontiadis I, Venieris S, Lane N (2021) Fjord: Fair and accurate federated learning under heterogeneous targets with ordered dropout. Adv Neural Inf Process Syst 34:12876\u201312889","journal-title":"Adv Neural Inf Process Syst"},{"key":"6162_CR14","unstructured":"Zhou G, Xu K, Li Q, Liu Y, Zhao Y (2021) Adaptcl: efficient collaborative learning with dynamic and adaptive pruning. arXiv preprint arXiv:2106.14126"},{"key":"6162_CR15","unstructured":"Xie C, Koyejo S, Gupta I (2019) Asynchronous federated optimization. arXiv preprint arXiv:1903.03934"},{"key":"6162_CR16","doi-asserted-by":"crossref","unstructured":"Chen Y, Ning Y, Slawski M, Rangwala H (2020) Asynchronous online federated learning for edge devices with non-iid data. 
In: 2020 IEEE International Conference on Big Data (Big Data), pp. 15\u201324 . IEEE","DOI":"10.1109\/BigData50022.2020.9378161"},{"key":"6162_CR17","unstructured":"Cai Y, Hua W, Chen H, Suh GE, De\u00a0Sa C, Zhang Z (2022) Structured pruning is all you need for pruning cnns at initialization. arXiv preprint arXiv:2203.02549"},{"key":"6162_CR18","first-page":"6377","volume":"33","author":"H Tanaka","year":"2020","unstructured":"Tanaka H, Kunin D, Yamins DL, Ganguli S (2020) Pruning neural networks without any data by iteratively conserving synaptic flow. Adv Neural Inf Process Syst 33:6377\u20136389","journal-title":"Adv Neural Inf Process Syst"},{"key":"6162_CR19","unstructured":"Frankle J, Dziugaite GK, Roy DM, Carbin M (2020) Pruning neural networks at initialization: Why are we missing the mark? arXiv preprint arXiv:2009.08576"},{"key":"6162_CR20","first-page":"20390","volume":"33","author":"J Su","year":"2020","unstructured":"Su J, Chen Y, Cai T, Wu T, Gao R, Wang L, Lee JD (2020) Sanity-checking pruning methods: Random tickets can win the jackpot. Adv Neural Inf Process Syst 33:20390\u201320401","journal-title":"Adv Neural Inf Process Syst"},{"key":"6162_CR21","first-page":"429","volume":"2","author":"T Li","year":"2020","unstructured":"Li T, Sahu AK, Zaheer M, Sanjabi M, Talwalkar A, Smith V (2020) Federated optimization in heterogeneous networks. Proc Mach Learn Syst 2:429\u2013450","journal-title":"Proc Mach Learn Syst"},{"issue":"12","key":"6162_CR22","doi-asserted-by":"publisher","first-page":"6600","DOI":"10.1103\/PhysRevA.39.6600","volume":"39","author":"SA Janowsky","year":"1989","unstructured":"Janowsky SA (1989) Pruning versus clipping in neural networks. Phys Rev A 39(12):6600","journal-title":"Phys Rev A"},{"key":"6162_CR23","unstructured":"Molchanov P, Tyree S, Karras T, Aila T, Kautz J (2016) Pruning convolutional neural networks for resource efficient inference. 
arXiv preprint arXiv:1611.06440"},{"key":"6162_CR24","unstructured":"Hassibi B, Stork D (1992) Second order derivatives for network pruning: Optimal brain surgeon. In: Proceedings of the 5th international conference on Neural Information Processing Systems. Morgan Kaufmann Publishers Inc, San Francisco, CA, USA, pp 164\u2013171"},{"key":"6162_CR25","unstructured":"Han S, Pool J, Tran J, Dally W (2015) Learning both weights and connections for efficient neural network. In: Proceedings of the 28th international conference on Neural Information Processing Systems. Montreal,    Canada, pp 1135\u20131143"},{"key":"6162_CR26","doi-asserted-by":"crossref","unstructured":"Molchanov P, Mallya A, Tyree S, Frosio I, Kautz J (2019) Importance estimation for neural network pruning. In: Proceedings of the IEEE\/CVF Conference on Computer Vision and Pattern Recognition, pp. 11264\u201311272","DOI":"10.1109\/CVPR.2019.01152"},{"key":"6162_CR27","unstructured":"Frankle J, Carbin M (2018) The lottery ticket hypothesis: Finding sparse, trainable neural networks. arXiv preprint arXiv:1803.03635"},{"key":"6162_CR28","unstructured":"Lee N, Ajanthan T, Torr PH (2018) Snip: Single-shot network pruning based on connection sensitivity. arXiv preprint arXiv:1810.02340"},{"key":"6162_CR29","unstructured":"Wang C, Zhang G, Grosse R (2020) Picking winning tickets before training by preserving gradient flow. arXiv preprint arXiv:2002.07376"},{"key":"6162_CR30","doi-asserted-by":"crossref","unstructured":"Liu S, Yu G, Yin R, Yuan J, Qu F (2020) Adaptive batchsize selection and gradient compression for wireless federated learning. In: GLOBECOM 2020-2020 IEEE Global Communications Conference, pp. 1\u20136. 
IEEE","DOI":"10.1109\/GLOBECOM42002.2020.9322122"},{"issue":"1","key":"6162_CR31","doi-asserted-by":"publisher","first-page":"231","DOI":"10.1109\/TCOMM.2021.3124961","volume":"70","author":"S Liu","year":"2021","unstructured":"Liu S, Yu G, Yin R, Yuan J, Shen L, Liu C (2021) Joint model pruning and device selection for communication-efficient federated edge learning. IEEE Trans Commun 70(1):231\u2013244","journal-title":"IEEE Trans Commun"},{"key":"6162_CR32","doi-asserted-by":"publisher","DOI":"10.1109\/TWC.2023.3329450","author":"X Liu","year":"2023","unstructured":"Liu X, Wang S, Deng Y, Nallanathan A (2023) Adaptive federated pruning in hierarchical wireless networks. IEEE Trans Wirel Commun. https:\/\/doi.org\/10.1109\/TWC.2023.3329450","journal-title":"IEEE Trans Wirel Commun"},{"key":"6162_CR33","doi-asserted-by":"publisher","DOI":"10.1109\/TWC.2023.3342626","author":"Z Chen","year":"2023","unstructured":"Chen Z, Yi W, Shin H, Nallanathan A (2023) Adaptive model pruning for communication and computation efficient wireless federated learning. IEEE Trans Wirel Commun. https:\/\/doi.org\/10.1109\/TWC.2023.3342626","journal-title":"IEEE Trans Wirel Commun"},{"key":"6162_CR34","unstructured":"Krizhevsky A, Hinton G et al. (2009) Learning multiple layers of features from tiny images. Technical report, University of Toronto. https:\/\/www.cs.toronto.edu\/~kriz\/learning-features-2009-TR.pdf"},{"key":"6162_CR35","unstructured":"Tan M, Le QV (2019) Efficientnet: Rethinking model scaling for convolutional neural networks. In: Chaudhuri K, Salakhutdinov R (eds.) Proceedings of the 36th International Conference on Machine Learning, ICML 2019, 9-15 June 2019, Long Beach, California, USA. Proceedings of Machine Learning Research, vol. 97, pp. 6105\u20136114. http:\/\/proceedings.mlr.press\/v97\/tan19a.html"},{"key":"6162_CR36","unstructured":"Hsieh K, Phanishayee A, Mutlu O, Gibbons PB (2020) The non-iid data quagmire of decentralized machine learning. 
In: Proceedings of the 37th International Conference on Machine Learning, ICML 2020, 13-18 July 2020, Virtual Event. Proceedings of Machine Learning Research, vol. 119, pp. 4387\u20134398. http:\/\/proceedings.mlr.press\/v119\/hsieh20a.html"},{"key":"6162_CR37","doi-asserted-by":"publisher","unstructured":"Wu Y, He K (2018) Group normalization. In: Ferrari V, Hebert M, Sminchisescu C, Weiss Y (eds.) Computer Vision - ECCV 2018 - 15th European Conference, Munich, Germany, September 8-14, 2018, Proceedings, Part XIII. Lecture Notes in Computer Science, vol. 11217, pp. 3\u201319. https:\/\/doi.org\/10.1007\/978-3-030-01261-8_1","DOI":"10.1007\/978-3-030-01261-8_1"},{"key":"6162_CR38","doi-asserted-by":"crossref","unstructured":"Wang L, Xu S, Wang X, Zhu Q (2021) Addressing class imbalance in federated learning. In: Proceedings of the AAAI Conference on Artificial Intelligence, pp. 10165\u201310173","DOI":"10.1609\/aaai.v35i11.17219"},{"key":"6162_CR39","doi-asserted-by":"crossref","unstructured":"Li Q, He B, Song D (2021) Model-contrastive federated learning. In: Proceedings of the IEEE\/CVF Conference on Computer Vision and Pattern Recognition, pp. 10713\u201310722","DOI":"10.1109\/CVPR46437.2021.01057"},{"key":"6162_CR40","unstructured":"Paszke A, Gross S, Massa F, Lerer A, Bradbury J, Chanan G, Killeen T, Lin Z, Gimelshein N, Antiga L, Desmaison A,  Kopf A, Yang E,  DeVito Z, Raison M, Tejani A, Chilamkurthy S, Steiner B, Fang L,  Bai J,  Chintala S (2019) Pytorch: An imperative style, high-performance deep learning library. In: Wallach H, Larochelle H, Beygelzimer A, d\u2019Alch\u2019e F,  Fox E, Garnett R (eds) Advances in neural information processing systems. https:\/\/proceedings.neurips.cc\/paper_files\/paper\/2019\/file\/bdbca288fee7f92f2bfa9f7012727740-Paper.pdf"},{"key":"6162_CR41","first-page":"1","volume":"638","author":"M Series","year":"2009","unstructured":"Series M (2009) Guidelines for evaluation of radio interface technologies for imt-advanced. 
Rep ITU 638:1\u201372","journal-title":"Rep ITU"}],"container-title":["The Journal of Supercomputing"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11227-024-06162-1.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s11227-024-06162-1\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11227-024-06162-1.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,7,25]],"date-time":"2024-07-25T10:12:50Z","timestamp":1721902370000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s11227-024-06162-1"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,5,19]]},"references-count":41,"journal-issue":{"issue":"13","published-print":{"date-parts":[[2024,9]]}},"alternative-id":["6162"],"URL":"https:\/\/doi.org\/10.1007\/s11227-024-06162-1","relation":{},"ISSN":["0920-8542","1573-0484"],"issn-type":[{"value":"0920-8542","type":"print"},{"value":"1573-0484","type":"electronic"}],"subject":[],"published":{"date-parts":[[2024,5,19]]},"assertion":[{"value":"21 April 2024","order":1,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"19 May 2024","order":2,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this article.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflict of interest"}}]}}