{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,8,15]],"date-time":"2025-08-15T02:42:40Z","timestamp":1755225760378,"version":"3.43.0"},"reference-count":54,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"9","license":[{"start":{"date-parts":[[2025,9,1]],"date-time":"2025-09-01T00:00:00Z","timestamp":1756684800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2025,9,1]],"date-time":"2025-09-01T00:00:00Z","timestamp":1756684800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,9,1]],"date-time":"2025-09-01T00:00:00Z","timestamp":1756684800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62025208","62421002","62376278"],"award-info":[{"award-number":["62025208","62421002","62376278"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Young Elite Scientists Sponsorship Program by CAST","award":["2022QNRC001"],"award-info":[{"award-number":["2022QNRC001"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Comput."],"published-print":{"date-parts":[[2025,9]]},"DOI":"10.1109\/tc.2025.3583827","type":"journal-article","created":{"date-parts":[[2025,6,27]],"date-time":"2025-06-27T13:45:10Z","timestamp":1751031910000},"page":"2977-2990","source":"Crossref","is-referenced-by-count":0,"title":["BHerd: Accelerating Federated Learning by Selecting Beneficial Herd of Local Gradients"],"prefix":"10.1109","volume":"74","author":[{"ORCID":"https:\/\/orcid.org\/0000-0001-5132-5831","authenticated-orcid":false,"given":"Ping","family":"Luo","sequence":"first","affiliation":[{"name":"National Key Laboratory of Parallel and Distributed Computing, College of Computer Science and Technology, National University of Defense Technology, ChangSha, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0622-1202","authenticated-orcid":false,"given":"Xiaoge","family":"Deng","sequence":"additional","affiliation":[{"name":"Intelligent Game and Decision Lab, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0009-0009-1728-7611","authenticated-orcid":false,"given":"Ziqing","family":"Wen","sequence":"additional","affiliation":[{"name":"National Key Laboratory of Parallel and Distributed Computing, College of Computer Science and Technology, National University of Defense Technology, ChangSha, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-5024-1900","authenticated-orcid":false,"given":"Tao","family":"Sun","sequence":"additional","affiliation":[{"name":"National Key Laboratory of Parallel and Distributed Computing, College of Computer Science and Technology, National University of Defense Technology, ChangSha, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9743-2034","authenticated-orcid":false,"given":"Dongsheng","family":"Li","sequence":"additional","affiliation":[{"name":"National Key Laboratory of Parallel and Distributed Computing, College of Computer Science and Technology, National University of Defense Technology, ChangSha, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1016\/j.teler.2024.100116"},{"key":"ref2","first-page":"70355","article-title":"Tpugraphs: A performance prediction dataset on large tensor computational graphs","volume":"36","author":"Phothilimthana","year":"2024","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1145\/3604933"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1016\/j.iot.2022.100674"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/JSAC.2019.2904348"},{"article-title":"Federated learning: Strategies for improving communication efficiency","year":"2016","author":"Kone\u010dn\u1ef3","key":"ref6"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/TC.2022.3180968"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/TC.2024.3477945"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/TC.2021.3068219"},{"key":"ref10","first-page":"1273","article-title":"Communication-efficient learning of deep networks from decentralized data","volume-title":"Proc. Artif. Intell. Statist.","author":"McMahan","year":"2017"},{"issue":"213","key":"ref11","first-page":"1","article-title":"Cooperative sgd: A unified framework for the design and analysis of local-update sgd algorithms","volume":"22","author":"Wang","year":"2021","journal-title":"J. Mach. Learn. Res."},{"key":"ref12","first-page":"1367","article-title":"Computational optimal transport: Complexity by accelerated gradient descent is better than by Sinkhorn\u2019s algorithm","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Dvurechensky","year":"2018"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2019.2952219"},{"key":"ref14","first-page":"1579","article-title":"Tight analyses for non-smooth stochastic gradient descent","volume-title":"Proc. Conf. Learn. Theory","author":"Harvey","year":"2019"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1109\/ICDCS.2019.00173"},{"author":"Sun","key":"ref16","article-title":"Gradient normalization makes heterogeneous distributed learning as easy as homogenous distributed learning"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2022.3196503"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2020.3013541"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2019.2944481"},{"key":"ref20","first-page":"5972","article-title":"No fear of heterogeneity: Classifier calibration for federated learning with non-iid data","volume":"34","author":"Luo","year":"2021","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2021.3095078"},{"key":"ref22","first-page":"20\u2009596","article-title":"Deep learning on a data diet: Finding important examples early in training","volume":"34","author":"Paul","year":"2021","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"article-title":"Dataset pruning: Reducing training data by examining generalization influence","year":"2022","author":"Yang","key":"ref23"},{"key":"ref24","first-page":"8969","article-title":"GRAB: Finding provably better data permutations than random reshuffling","volume":"35","author":"Lu","year":"2022","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1109\/JSAC.2022.3211564"},{"key":"ref26","first-page":"3557","article-title":"Personalized federated learning with theoretical guarantees: A model-agnostic meta-learning approach","volume":"33","author":"Fallah","year":"2020","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"key":"ref27","first-page":"1126","article-title":"Model-agnostic meta-learning for fast adaptation of deep networks","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Finn","year":"2017"},{"key":"ref28","first-page":"21394","article-title":"Personalized federated learning with Moreau envelopes","volume":"33","author":"Dinh","year":"2020","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"key":"ref29","first-page":"4427","article-title":"Federated multi-task learning","volume":"30","author":"Smith","year":"2017","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"article-title":"Variational federated multi-task learning","year":"2019","author":"Corinzia","key":"ref30"},{"article-title":"Federated meta-learning with fast convergence and efficient communication","year":"2018","author":"Chen","key":"ref31"},{"key":"ref32","first-page":"12878","article-title":"Data-free knowledge distillation for heterogeneous federated learning","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Zhu","year":"2021"},{"key":"ref33","first-page":"2351","article-title":"Ensemble distillation for robust model fusion in federated learning","volume":"33","author":"Lin","year":"2020","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"article-title":"Federated adversarial domain adaptation","year":"2019","author":"Peng","key":"ref34"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1016\/j.neucom.2021.07.098"},{"key":"ref36","article-title":"FedMix: Approximation of mixup under mean augmented federated learning","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Yoon","year":"2021"},{"key":"ref37","article-title":"mixup: Beyond empirical risk minimization","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Zhang","year":"2018"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2022.3195549"},{"article-title":"Mitigating data heterogeneity in federated learning with data augmentation","year":"2022","author":"Back de Luca","key":"ref39"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1109\/INFOCOM41043.2020.9155494"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.23919\/EUSIPCO54536.2021.9616052"},{"article-title":"Federated learning with non-iid data","year":"2018","author":"Zhao","key":"ref42"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1109\/ICPR48806.2021.9412599"},{"article-title":"Hybrid-FL: Cooperative learning mechanism using non-iid data in wireless networks","year":"2019","author":"Yoshida","key":"ref44"},{"key":"ref45","first-page":"17309","article-title":"Random reshuffling: Simple analysis with vast improvements","volume":"33","author":"Mishchenko","year":"2020","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"key":"ref46","article-title":"FedShuffle: Recipes for better use of local work in federated learning","volume-title":"Transactions on Machine Learning Research","author":"Horv\u00e1th","year":"2022"},{"key":"ref47","first-page":"7611","article-title":"Tackling the objective inconsistency problem in heterogeneous federated optimization","volume":"33","author":"Wang","year":"2020","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"key":"ref48","first-page":"5132","article-title":"Scaffold: Stochastic controlled averaging for federated learning","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Karimireddy","year":"2020"},{"key":"ref49","article-title":"Federated learning based on dynamic regularization","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Durmus","year":"2021"},{"key":"ref50","doi-asserted-by":"publisher","DOI":"10.1109\/5.726791"},{"author":"Krizhevsky","key":"ref51","article-title":"Learning multiple layers of features from tiny images"},{"key":"ref52","first-page":"142","article-title":"Learning word vectors for sentiment analysis","volume-title":"Proc. 49th Annu. Meeting Assoc. Comput. Linguistics: Human Lang. Technol.","author":"Maas","year":"2011"},{"key":"ref53","doi-asserted-by":"publisher","DOI":"10.1109\/WACV.2017.58"},{"key":"ref54","doi-asserted-by":"publisher","DOI":"10.1137\/16M1080173"}],"container-title":["IEEE Transactions on Computers"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/12\/11121311\/11053664.pdf?arnumber=11053664","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,8,13]],"date-time":"2025-08-13T17:34:04Z","timestamp":1755106444000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11053664\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,9]]},"references-count":54,"journal-issue":{"issue":"9"},"URL":"https:\/\/doi.org\/10.1109\/tc.2025.3583827","relation":{},"ISSN":["0018-9340","1557-9956","2326-3814"],"issn-type":[{"type":"print","value":"0018-9340"},{"type":"electronic","value":"1557-9956"},{"type":"electronic","value":"2326-3814"}],"subject":[],"published":{"date-parts":[[2025,9]]}}}