{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,1,31]],"date-time":"2026-01-31T03:59:13Z","timestamp":1769831953159,"version":"3.49.0"},"publisher-location":"New York, NY, USA","reference-count":43,"publisher":"ACM","license":[{"start":{"date-parts":[[2022,10,3]],"date-time":"2022-10-03T00:00:00Z","timestamp":1664755200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"funder":[{"name":"SES","award":["1952007"],"award-info":[{"award-number":["1952007"]}]},{"name":"NSF grants CAREER","award":["CNS-2110259, CNS-2112471, CNS-2102233, CCF-2110252, CCF 1934884"],"award-info":[{"award-number":["CNS-2110259, CNS-2112471, CNS-2102233, CCF-2110252, CCF 1934884"]}]}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2022,10,3]]},"DOI":"10.1145\/3492866.3549723","type":"proceedings-article","created":{"date-parts":[[2022,9,21]],"date-time":"2022-09-21T16:34:33Z","timestamp":1663778073000},"page":"71-80","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":14,"title":["NET-FLEET"],"prefix":"10.1145","author":[{"given":"Xin","family":"Zhang","sequence":"first","affiliation":[{"name":"Iowa State University"}]},{"given":"Minghong","family":"Fang","sequence":"additional","affiliation":[{"name":"The Ohio State University"}]},{"given":"Zhuqing","family":"Liu","sequence":"additional","affiliation":[{"name":"The Ohio State University"}]},{"given":"Haibo","family":"Yang","sequence":"additional","affiliation":[{"name":"The Ohio State University"}]},{"given":"Jia","family":"Liu","sequence":"additional","affiliation":[{"name":"The Ohio State University"}]},{"given":"Zhengyuan","family":"Zhu","sequence":"additional","affiliation":[{"name":"Iowa State 
University"}]}],"member":"320","published-online":{"date-parts":[[2022,10,3]]},"reference":[{"key":"e_1_3_2_2_1_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.ijmedinf.2018.01.007"},{"key":"e_1_3_2_2_2_1","doi-asserted-by":"publisher","DOI":"10.14722\/ndss.2021.24434"},{"key":"e_1_3_2_2_3_1","first-page":"165","article-title":"Optimal distributed online prediction using mini-batches","volume":"13","author":"Dekel O.","year":"2012","unstructured":"Dekel, O., Gilad-Bachrach, R., Shamir, O., and Xiao, L. Optimal distributed online prediction using mini-batches. The Journal of Machine Learning Research 13 (2012), 165--202.","journal-title":"The Journal of Machine Learning Research"},{"key":"e_1_3_2_2_4_1","first-page":"687","volume-title":"Proceedings of the 32nd International Conference on Neural Information Processing Systems","author":"Fang C.","year":"2018","unstructured":"Fang, C., Li, C. J., Lin, Z., and Zhang, T. Spider: near-optimal non-convex optimization via stochastic path integrated differential estimator. In Proceedings of the 32nd International Conference on Neural Information Processing Systems (2018), pp. 687--697."},{"key":"e_1_3_2_2_5_1","volume-title":"Periodic stochastic gradient descent with momentum for decentralized training. arXiv preprint arXiv:2008.10435","author":"Gao H.","year":"2020","unstructured":"Gao, H., and Huang, H. Periodic stochastic gradient descent with momentum for decentralized training. arXiv preprint arXiv:2008.10435 (2020)."},{"key":"e_1_3_2_2_6_1","doi-asserted-by":"publisher","DOI":"10.1137\/120880811"},{"key":"e_1_3_2_2_7_1","volume-title":"On the convergence of local descent methods in federated learning. arXiv preprint arXiv:1910.14425","author":"Haddadpour F.","year":"2019","unstructured":"Haddadpour, F., and Mahdavi, M. On the convergence of local descent methods in federated learning. 
arXiv preprint arXiv:1910.14425 (2019)."},{"key":"e_1_3_2_2_8_1","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.90"},{"key":"e_1_3_2_2_9_1","volume-title":"Accelerating stochastic gradient descent using predictive variance reduction. Advances in neural information processing systems 26","author":"Johnson R.","year":"2013","unstructured":"Johnson, R., and Zhang, T. Accelerating stochastic gradient descent using predictive variance reduction. Advances in neural information processing systems 26 (2013), 315--323."},{"key":"e_1_3_2_2_10_1","doi-asserted-by":"publisher","DOI":"10.1561\/2200000083"},{"key":"e_1_3_2_2_11_1","doi-asserted-by":"publisher","DOI":"10.1109\/MWC.001.1900119"},{"key":"e_1_3_2_2_12_1","first-page":"5132","volume-title":"International Conference on Machine Learning","author":"Karimireddy S. P.","year":"2020","unstructured":"Karimireddy, S. P., Kale, S., Mohri, M., Reddi, S., Stich, S., and Suresh, A. T. Scaffold: Stochastic controlled averaging for federated learning. In International Conference on Machine Learning (2020), PMLR, pp. 5132--5143."},{"key":"e_1_3_2_2_13_1","volume-title":"ICML Workshop on Federated Learning for User Privacy and Data Confidentiality","author":"Khanduri P.","year":"2021","unstructured":"Khanduri, P., Sharma, P., Yang, H., Hong, M., Liu, J., Rajawat, K., and Varshney, P. K. Achieving optimal sample and communication complexities for non-iid federated learning. In ICML Workshop on Federated Learning for User Privacy and Data Confidentiality (2021)."},{"key":"e_1_3_2_2_14_1","unstructured":"Krizhevsky A. Hinton G. et al. Learning multiple layers of features from tiny images."},{"key":"e_1_3_2_2_15_1","volume-title":"Mnist handwritten digit database. Available: http:\/\/yann.lecun.com\/exdb\/mnist","author":"LeCun Y.","year":"1998","unstructured":"LeCun, Y., Cortes, C., and Burges, C. Mnist handwritten digit database. 
Available: http:\/\/yann.lecun.com\/exdb\/mnist (1998)."},{"key":"e_1_3_2_2_16_1","doi-asserted-by":"publisher","DOI":"10.1109\/MSP.2020.2975749"},{"key":"e_1_3_2_2_17_1","volume-title":"Communication efficient decentralized training with multiple local updates. arXiv preprint arXiv:1910.09126","author":"Li X.","year":"2019","unstructured":"Li, X., Yang, W., Wang, S., and Zhang, Z. Communication efficient decentralized training with multiple local updates. arXiv preprint arXiv:1910.09126 (2019)."},{"key":"e_1_3_2_2_18_1","volume-title":"Advances in Neural Information Processing Systems","volume":"30","author":"Lian X.","year":"2017","unstructured":"Lian, X., Zhang, C., Zhang, H., Hsieh, C.-J., Zhang, W., and Liu, J. Can decentralized algorithms outperform centralized algorithms? a case study for decentralized parallel stochastic gradient descent. In Advances in Neural Information Processing Systems (2017), vol. 30."},{"key":"e_1_3_2_2_19_1","volume-title":"Variance reduced local SGD with lower communication complexity. arXiv preprint arXiv:1912.12844","author":"Liang X.","year":"2019","unstructured":"Liang, X., Shen, S., Liu, J., Pan, Z., Chen, E., and Cheng, Y. Variance reduced local SGD with lower communication complexity. arXiv preprint arXiv:1912.12844 (2019)."},{"key":"e_1_3_2_2_20_1","volume-title":"Don't use large mini-batches, use local sgd. arXiv preprint arXiv:1808.07217","author":"Lin T.","year":"2018","unstructured":"Lin, T., Stich, S. U., Patel, K. K., and Jaggi, M. Don't use large mini-batches, use local sgd. arXiv preprint arXiv:1808.07217 (2018)."},{"key":"e_1_3_2_2_21_1","doi-asserted-by":"publisher","DOI":"10.1109\/DSW.2019.8755807"},{"key":"e_1_3_2_2_22_1","doi-asserted-by":"publisher","DOI":"10.1109\/CISS48834.2020.1570617414"},{"key":"e_1_3_2_2_23_1","first-page":"1273","volume-title":"PMLR","author":"McMahan B.","unstructured":"McMahan, B., Moore, E., Ramage, D., Hampson, S., and y Arcas, B. A. 
Communication-efficient learning of deep networks from decentralized data. In Artificial Intelligence and Statistics (2017), PMLR, pp. 1273--1282."},{"key":"e_1_3_2_2_24_1","doi-asserted-by":"publisher","DOI":"10.1109\/TAC.2008.2009515"},{"key":"e_1_3_2_2_25_1","volume-title":"Distributed stochastic gradient tracking methods. Mathematical Programming","author":"Pu S.","year":"2020","unstructured":"Pu, S., and Nedi\u0107, A. Distributed stochastic gradient tracking methods. Mathematical Programming (2020), 1--49."},{"key":"e_1_3_2_2_26_1","first-page":"3","article-title":"Harnessing smoothness to accelerate distributed optimization","volume":"5","author":"Qu G.","year":"2017","unstructured":"Qu, G., and Li, N. Harnessing smoothness to accelerate distributed optimization. IEEE Transactions on Control of Network Systems 5, 3 (2017), 1245--1260.","journal-title":"IEEE Transactions on Control of Network Systems"},{"key":"e_1_3_2_2_27_1","volume-title":"On the convergence of federated optimization in heterogeneous networks. arXiv preprint arXiv:1812.06127 3","author":"Sahu A. K.","year":"2018","unstructured":"Sahu, A. K., Li, T., Sanjabi, M., Zaheer, M., Talwalkar, A., and Smith, V. On the convergence of federated optimization in heterogeneous networks. arXiv preprint arXiv:1812.06127 3 (2018)."},{"key":"e_1_3_2_2_28_1","volume-title":"Local sgd converges fast and communicates little. arXiv preprint arXiv:1805.09767","author":"Stich S. U.","year":"2018","unstructured":"Stich, S. U. Local sgd converges fast and communicates little. arXiv preprint arXiv:1805.09767 (2018)."},{"key":"e_1_3_2_2_29_1","first-page":"1","article-title":"The error-feedback framework: Better rates for sgd with delayed gradients and compressed updates","volume":"21","author":"Stich S. U.","year":"2020","unstructured":"Stich, S. U., and Karimireddy, S. P. The error-feedback framework: Better rates for sgd with delayed gradients and compressed updates. 
Journal of Machine Learning Research 21 (2020), 1--36.","journal-title":"Journal of Machine Learning Research"},{"key":"e_1_3_2_2_30_1","first-page":"22","article-title":"Cooperative sgd: A unified framework for the design and analysis of local-update sgd algorithms","author":"Wang J.","year":"2021","unstructured":"Wang, J., and Joshi, G. Cooperative sgd: A unified framework for the design and analysis of local-update sgd algorithms. Journal of Machine Learning Research 22 (2021).","journal-title":"Journal of Machine Learning Research"},{"key":"e_1_3_2_2_31_1","doi-asserted-by":"publisher","DOI":"10.1109\/JSAC.2019.2904348"},{"key":"e_1_3_2_2_32_1","doi-asserted-by":"publisher","DOI":"10.1109\/TSP.2021.3062553"},{"key":"e_1_3_2_2_33_1","first-page":"1","article-title":"Federated learning for healthcare informatics","author":"Xu J.","year":"2020","unstructured":"Xu, J., Glicksberg, B. S., Su, C., Walker, P., Bian, J., and Wang, F. Federated learning for healthcare informatics. Journal of Healthcare Informatics Research (2020), 1--19.","journal-title":"Journal of Healthcare Informatics Research"},{"key":"e_1_3_2_2_34_1","volume-title":"International Conference on Learning Representations","author":"Yang H.","year":"2021","unstructured":"Yang, H., Fang, M., and Liu, J. Achieving linear speedup with partial worker participation in non-i.i.d. federated learning. In International Conference on Learning Representations (2021)."},{"key":"e_1_3_2_2_35_1","doi-asserted-by":"publisher","DOI":"10.23919\/WiOpt52861.2021.9589061"},{"key":"e_1_3_2_2_36_1","first-page":"25331","volume-title":"International Conference on Machine Learning","author":"Yang H.","year":"2022","unstructured":"Yang, H., Zhang, X., Khanduri, P., and Liu, J. Anarchic federated learning. In International Conference on Machine Learning (2022), PMLR, pp. 
25331--25363."},{"key":"e_1_3_2_2_37_1","doi-asserted-by":"publisher","DOI":"10.1145\/3298981"},{"key":"e_1_3_2_2_38_1","first-page":"7184","volume-title":"International Conference on Machine Learning","author":"Yu H.","year":"2019","unstructured":"Yu, H., Jin, R., and Yang, S. On the linear speedup analysis of communication efficient momentum sgd for distributed non-convex optimization. In International Conference on Machine Learning (2019), PMLR, pp. 7184--7193."},{"key":"e_1_3_2_2_39_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v33i01.33015693"},{"key":"e_1_3_2_2_40_1","doi-asserted-by":"publisher","DOI":"10.1137\/130943170"},{"key":"e_1_3_2_2_41_1","doi-asserted-by":"publisher","DOI":"10.1109\/TSP.2018.2818081"},{"key":"e_1_3_2_2_42_1","unstructured":"Zhang X. Fang M. Liu Z. Yang H. Liu J. and Zhu Z. Net-fleet: Achieving linear convergence speedup for fully decentralized federated learning with heterogeneous data. https:\/\/kevinliu-osu.github.io\/publications\/FLEET_TR.pdf."},{"key":"e_1_3_2_2_43_1","volume-title":"Federated learning with non-iid data. arXiv preprint arXiv:1806.00582","author":"Zhao Y.","year":"2018","unstructured":"Zhao, Y., Li, M., Lai, L., Suda, N., Civin, D., and Chandra, V. Federated learning with non-iid data. 
arXiv preprint arXiv:1806.00582 (2018)."}],"event":{"name":"MobiHoc '22: The Twenty-third International Symposium on Theory, Algorithmic Foundations, and Protocol Design for Mobile Networks and Mobile Computing","location":"Seoul Republic of Korea","acronym":"MobiHoc '22","sponsor":["SIGMOBILE ACM Special Interest Group on Mobility of Systems, Users, Data and Computing"]},"container-title":["Proceedings of the Twenty-Third International Symposium on Theory, Algorithmic Foundations, and Protocol Design for Mobile Networks and Mobile Computing"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3492866.3549723","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3492866.3549723","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,17]],"date-time":"2025-06-17T20:48:27Z","timestamp":1750193307000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3492866.3549723"}},"subtitle":["achieving linear convergence speedup for fully decentralized federated learning with heterogeneous data"],"short-title":[],"issued":{"date-parts":[[2022,10,3]]},"references-count":43,"alternative-id":["10.1145\/3492866.3549723","10.1145\/3492866"],"URL":"https:\/\/doi.org\/10.1145\/3492866.3549723","relation":{},"subject":[],"published":{"date-parts":[[2022,10,3]]},"assertion":[{"value":"2022-10-03","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}