{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,9]],"date-time":"2026-04-09T05:19:43Z","timestamp":1775711983053,"version":"3.50.1"},"reference-count":65,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"4","license":[{"start":{"date-parts":[[2022,7,1]],"date-time":"2022-07-01T00:00:00Z","timestamp":1656633600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/legalcode"}],"funder":[{"DOI":"10.13039\/501100002347","name":"Bundesministerium f\u00fcr Bildung und Forschung","doi-asserted-by":"publisher","id":[{"id":"10.13039\/501100002347","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Berlin Institute for the Foundations of Learning and Data","award":["01IS18025A"],"award-info":[{"award-number":["01IS18025A"]}]},{"name":"Berlin Institute for the Foundations of Learning and Data","award":["01IS18037I"],"award-info":[{"award-number":["01IS18037I"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Netw. Sci. Eng."],"published-print":{"date-parts":[[2022,7,1]]},"DOI":"10.1109\/tnse.2021.3081748","type":"journal-article","created":{"date-parts":[[2021,5,19]],"date-time":"2021-05-19T21:36:05Z","timestamp":1621460165000},"page":"2025-2038","source":"Crossref","is-referenced-by-count":63,"title":["CFD: Communication-Efficient Federated Distillation via Soft-Label Quantization and Delta Coding"],"prefix":"10.1109","volume":"9","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-9425-2238","authenticated-orcid":false,"given":"Felix","family":"Sattler","sequence":"first","affiliation":[{"name":"Department of Artificial Intelligence, Fraunhofer Heinrich Hertz Institute, Berlin, Germany"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-3065-0103","authenticated-orcid":false,"given":"Arturo","family":"Marban","sequence":"additional","affiliation":[{"name":"Department of Artificial Intelligence, Fraunhofer Heinrich Hertz Institute, Berlin, Germany"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-2657-2811","authenticated-orcid":false,"given":"Roman","family":"Rischke","sequence":"additional","affiliation":[{"name":"Department of Artificial Intelligence, Fraunhofer Heinrich Hertz Institute, Berlin, Germany"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-6283-3265","authenticated-orcid":false,"given":"Wojciech","family":"Samek","sequence":"additional","affiliation":[{"name":"Department of Artificial Intelligence, Fraunhofer Heinrich Hertz Institute, Berlin, Germany"}]}],"member":"263","reference":[{"key":"ref1","first-page":"1273","article-title":"Communication-efficient learning of deep networks from decentralized data","volume-title":"Proc. 20th Int. Conf. Artif. Intell. Statist.","author":"McMahan","year":"2017"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1561\/2200000083"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/MSP.2020.2975749"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/MNET.001.1900506"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/MCOM.001.1900461"},{"issue":"1","key":"ref6","first-page":"53","article-title":"Trends and advancements in deep neural network communication","volume":"3","author":"Sattler","year":"2020"},{"key":"ref7","article-title":"Communication-efficient on-device machine learning: Federated distillation and augmentation under non-iid private data","author":"Jeong","year":"2018"},{"key":"ref8","article-title":"FedMD: Heterogenous federated learning via model distillation","author":"Li","year":"2019"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/PIMRC.2019.8904164"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1109\/LCOMM.2020.3003693"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/TMC.2021.3070013"},{"key":"ref12","first-page":"2351","article-title":"Ensemble distillation for robust model fusion in federated learning","volume-title":"Advances Neural Inf. Process. Syst.","volume":"33","author":"Lin","year":"2020"},{"key":"ref13","article-title":"Making bayesian model ensemble applicable to federated learning","author":"Chen","year":"2020"},{"key":"ref14","article-title":"Federated learning: Strategies for improving communication efficiency","author":"Kone\u010dn\u00fd","year":"2016"},{"key":"ref15","article-title":"Expanding the reach of federated learning by reducing client resource requirements","author":"Caldas","year":"2018"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.90"},{"key":"ref17","first-page":"1877","article-title":"Language models are few-shot learners","volume-title":"Advances Neural Inf. Process. Syst.","volume":"33","author":"Brown","year":"2020"},{"key":"ref18","first-page":"2306","article-title":"Universal approximation with deep narrow networks","volume-title":"Proc. Conf. Learn. Theory, Ser. Mach. Learn. Res.","volume":"125","author":"Kidger","year":"2020"},{"key":"ref19","article-title":"A closer look at the approximation capabilities of neural networks","volume-title":"Proc. 8th Int. Conf. Learn. Representations. Open Review.net","author":"Chong","year":"2020"},{"key":"ref20","first-page":"103","article-title":"GPipe: Efficient training of giant neural networks using pipeline parallelism","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"32","author":"Huang","year":"2019"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/ICC.2019.8761315"},{"key":"ref22","first-page":"598","article-title":"Optimal brain damage","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"2","author":"LeCun","year":"1990"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/d17-1045"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/IJCNN.2019.8852172"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2019.2944481"},{"key":"ref26","first-page":"3123","article-title":"BinaryConnect: Training deep neural networks with binary weights during propagations","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"28","author":"Courbariaux","year":"2015"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP49357.2023.10094626"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2020.3041185"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1109\/ICIP40778.2020.9190821"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1109\/JSTSP.2020.2969554"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2021.3129371"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2009.5206848"},{"key":"ref33","article-title":"Pointer sentinel mixture models","volume-title":"Proc. 5th Int. Conf. Learn. Representations","author":"Merity","year":"2017"},{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.1145\/3133956.3134012"},{"key":"ref35","first-page":"16937","article-title":"Inverting gradients-how easy is it to break privacy in federated learning?","volume":"33","author":"Geiping","year":"2020"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1017\/9781108966559.019"},{"key":"ref37","first-page":"22593","article-title":"Distributed distillation for on-device learning","volume-title":"Proc. 34th Conf. Neural Inf. Process. Syst.","author":"Bistritz","year":"2020"},{"key":"ref38","article-title":"One-shot federated learning","author":"Guha","year":"2019"},{"key":"ref39","article-title":"Cronus: Robust and heterogeneous collaborative learning with black-box knowledge transfer","author":"Chang","year":"2019"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP40776.2020.9054168"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.1109\/TSP.2020.3046971"},{"key":"ref42","article-title":"Federated learning with non-iid data","author":"Zhao","year":"2018"},{"key":"ref43","article-title":"On the convergence of FedAvg on non-iid data","volume-title":"Proc. 8th Int. Conf. Learn. Representations. Open Review.net","author":"Li","year":"2020"},{"key":"ref44","article-title":"Federated optimization in heterogeneous networks","volume-title":"Proc. Mach. Learn. Syst.","author":"Li","year":"2020"},{"key":"ref45","first-page":"4424","article-title":"Federated multi-task learning","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"30","author":"Smith","year":"2017"},{"key":"ref46","article-title":"A theoretical perspective on differentially private federated multi-task learning","author":"Wu","year":"2020"},{"key":"ref47","article-title":"Robust federated learning in a heterogeneous environment","author":"Ghosh","year":"2019"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2020.3015958"},{"key":"ref49","article-title":"Measuring the effects of non-identical data distribution for federated visual classification","author":"Hsu","year":"2019"},{"key":"ref50","doi-asserted-by":"publisher","DOI":"10.1007\/978-1-4757-3264-1"},{"key":"ref51","doi-asserted-by":"publisher","DOI":"10.1109\/19.492748"},{"key":"ref52","volume-title":"Introduction to Data Compression","author":"Sayood","year":"2017"},{"key":"ref53","doi-asserted-by":"publisher","DOI":"10.1109\/TCSVT.2003.815173"},{"key":"ref54","doi-asserted-by":"publisher","DOI":"10.1109\/IJCNN.2017.7966217"},{"key":"ref55","first-page":"215","article-title":"An analysis of single-layer networks in unsupervised feature learning","volume-title":"Proc. 14th Int. Conf. Artif. Intell. Statist.","author":"Coates","year":"2011"},{"key":"ref56","first-page":"1631","article-title":"Recursive deep models for semantic compositionality over a sentiment treebank","volume-title":"Proc. Conf. Empirical Methods Natural Lang. Process.","author":"Socher","year":"2013"},{"key":"ref57","first-page":"649","article-title":"Character-level convolutional networks for text classification","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"28","author":"Zhang","year":"2015"},{"key":"ref58","first-page":"396","article-title":"Handwritten digit recognition with a back-propagation network","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"2","author":"LeCun","year":"1989"},{"key":"ref59","article-title":"Very deep convolutional networks for large-scale image recognition","volume-title":"3rd Int. Conf. Learn. Representations","author":"Simonyan","year":"2015"},{"key":"ref60","doi-asserted-by":"publisher","DOI":"10.5555\/2999134.2999257"},{"key":"ref61","article-title":"DistilBERT, a distilled version of BERT: Smaller, faster, cheaper and lighter","author":"Sanh","year":"2019"},{"key":"ref62","article-title":"Adam: A method for stochastic optimization","author":"Kingma","year":"2014"},{"key":"ref63","article-title":"Improving federated learning personalization via model agnostic meta learning","author":"Jiang","year":"2019"},{"key":"ref64","article-title":"Lag: Lazily aggregated gradient for communication-efficient distributed learning","volume-title":"Advances in Neural Information Processing Systems","volume":"31","author":"Chen","year":"2018"},{"key":"ref65","article-title":"Communication efficient distributed learning with censored, quantized, and generalized group admm","author":"Issaid","year":"2020"}],"container-title":["IEEE Transactions on Network Science and Engineering"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/6488902\/9808096\/09435947.pdf?arnumber=9435947","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,1,9]],"date-time":"2024-01-09T23:48:10Z","timestamp":1704844090000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9435947\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2022,7,1]]},"references-count":65,"journal-issue":{"issue":"4"},"URL":"https:\/\/doi.org\/10.1109\/tnse.2021.3081748","relation":{},"ISSN":["2327-4697","2334-329X"],"issn-type":[{"value":"2327-4697","type":"electronic"},{"value":"2334-329X","type":"electronic"}],"subject":[],"published":{"date-parts":[[2022,7,1]]}}}