{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,1,18]],"date-time":"2025-01-18T05:07:15Z","timestamp":1737176835678,"version":"3.33.0"},"reference-count":36,"publisher":"IEEE","license":[{"start":{"date-parts":[[2024,12,15]],"date-time":"2024-12-15T00:00:00Z","timestamp":1734220800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,12,15]],"date-time":"2024-12-15T00:00:00Z","timestamp":1734220800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024,12,15]]},"DOI":"10.1109\/bigdata62323.2024.10825820","type":"proceedings-article","created":{"date-parts":[[2025,1,16]],"date-time":"2025-01-16T18:31:23Z","timestamp":1737052283000},"page":"7774-7783","source":"Crossref","is-referenced-by-count":0,"title":["FLeNS: Federated Learning with Enhanced Nesterov-Newton Sketch"],"prefix":"10.1109","author":[{"given":"Sunny","family":"Gupta","sequence":"first","affiliation":[{"name":"Indian Institute of Technology, Bombay,Koita Centre for Digital Health,Mumbai,India"}]},{"given":"Mohit","family":"Jindal","sequence":"additional","affiliation":[{"name":"Indian Institute of Technology, Bombay,Department of Electrical Engineering,Mumbai,India"}]},{"given":"Pankhi","family":"Kashyap","sequence":"additional","affiliation":[{"name":"Indian Institute of Technology, Bombay,Koita Centre for Digital Health,Mumbai,India"}]},{"given":"Pranav","family":"Jeevan","sequence":"additional","affiliation":[{"name":"Indian Institute of Technology, Bombay,Department of Electrical Engineering,Mumbai,India"}]},{"given":"Amit","family":"Sethi","sequence":"additional","affiliation":[{"name":"Indian Institute of Technology, Bombay,Department of Electrical 
Engineering,Mumbai,India"}]}],"member":"263","reference":[{"article-title":"Federated learning: Strategies for improving communication efficiency","year":"2016","author":"Kone\u010dn\u00fd","key":"ref1"},{"key":"ref2","first-page":"1273","article-title":"Communication-efficient learning of deep networks from decentralized data","volume-title":"Artificial Intelligence and Statistics","author":"McMahan","year":"2017"},{"article-title":"Federated optimization in heterogeneous networks","volume-title":"Proceedings of Machine Learning and Systems","author":"Li","key":"ref3"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-89188-6_3"},{"key":"ref5","first-page":"1","article-title":"Optimal convergence rates for distributed nystrom approximation","volume":"24","author":"Li","year":"2023","journal-title":"Journal of Machine Learning Research"},{"article-title":"On the convergence of fedavg on non-iid data","volume-title":"ICLR","author":"Li","key":"ref6"},{"key":"ref7","first-page":"5132","article-title":"Scaffold: Stochastic controlled averaging for federated learning","volume-title":"International Conference on Machine Learning","author":"Karimireddy"},{"key":"ref8","first-page":"7057","article-title":"Fedsplit: an algorithmic framework for fast federated optimization","volume-title":"NeurIPS","volume":"33","author":"Pathak"},{"key":"ref9","first-page":"9050","article-title":"Sharp bounds for federated averaging (local sgd) and continuous perspective","volume-title":"International Conference on Artificial Intelligence and Statistics","author":"Glasgow"},{"key":"ref10","first-page":"4615","article-title":"Agnostic federated learning","volume-title":"International Conference on Machine Learning","author":"Mohri"},{"article-title":"A non-parametric view of fedavg and fedprox: Beyond stationary points","year":"2021","author":"Su","key":"ref11"},{"key":"ref12","article-title":"What do we mean by generalization in federated 
learning?","author":"Yuan","year":"2022","journal-title":"ICLR"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1017\/CBO9780511804441"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1137\/16m1080173"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1093\/imamat\/6.1.76"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1007\/BF01589116"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1137\/0719025"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1162\/08997660260028683"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1137\/15M1021106"},{"key":"ref20","first-page":"543","article-title":"A method for solving the convex programming problem with convergence rate o (1\/k2)","volume":"269","author":"Nesterov","year":"1983","journal-title":"Dokl akad nauk Sssr"},{"key":"ref21","first-page":"2835","article-title":"Newton-less: Sparsification without trade-offs for the sketched newton update","volume-title":"NeurIPS","author":"Derezinski"},{"key":"ref22","first-page":"19377","article-title":"Effective dimension adaptive sketching methods for faster regularized least-squares optimization","volume-title":"NeurIPS","author":"Lacotte"},{"key":"ref23","first-page":"5926","article-title":"Adaptive newton sketch: linear-time optimization with quadratic convergence and effective hessian dimensionality","volume-title":"International Conference on Machine Learning","author":"Lacotte"},{"key":"ref24","first-page":"18028","article-title":"Distributed newton can communicate less and resist byzantine workers","volume-title":"NeurIPS","volume":"33","author":"Ghosh"},{"article-title":"Localnewton: Reducing communication bottleneck for distributed learning","year":"2021","author":"Gupta","key":"ref25"},{"key":"ref26","first-page":"5861","article-title":"Fednew: A communication-efficient and privacy-preserving newton-type method for federated learning","volume-title":"International Conference on Machine 
Learning","author":"Elgabli"},{"key":"ref27","first-page":"18959","article-title":"Fednl: Making newton-type methods applicable to federated learning","volume-title":"International Conference on Machine Learning","author":"Safaryan"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1016\/j.automatica.2023.111460"},{"key":"ref29","doi-asserted-by":"crossref","DOI":"10.1137\/1.9781611971200","volume-title":"Numerical methods for unconstrained optimization and nonlinear equations","volume":"16","author":"Dennis","year":"1996"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1145\/1132516.1132597"},{"issue":"147","key":"ref31","first-page":"1","article-title":"Optimal convergence for distributed learning with stochastic gradient methods and spectral algorithms","volume":"21","author":"Lin","year":"2020","journal-title":"Journal of Machine Learning Research"},{"article-title":"A newton-type algorithm for federated learning based on incremental hessian eigenvector sharing","year":"2022","author":"Fabbro","key":"ref32"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v38i12.29254"},{"key":"ref34","volume-title":"Introductory lectures on convex optimization: A basic course","volume":"87","author":"Nesterov","year":"2013"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1137\/070704277"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1145\/1961189.1961199"}],"event":{"name":"2024 IEEE International Conference on Big Data (BigData)","start":{"date-parts":[[2024,12,15]]},"location":"Washington, DC, USA","end":{"date-parts":[[2024,12,18]]}},"container-title":["2024 IEEE International Conference on Big Data 
(BigData)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/10824975\/10824942\/10825820.pdf?arnumber=10825820","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,1,17]],"date-time":"2025-01-17T07:43:37Z","timestamp":1737099817000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10825820\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,12,15]]},"references-count":36,"URL":"https:\/\/doi.org\/10.1109\/bigdata62323.2024.10825820","relation":{},"subject":[],"published":{"date-parts":[[2024,12,15]]}}}