{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,9,28]],"date-time":"2025-09-28T06:46:08Z","timestamp":1759041968847,"version":"3.28.0"},"reference-count":20,"publisher":"IEEE","license":[{"start":{"date-parts":[[2020,5,1]],"date-time":"2020-05-01T00:00:00Z","timestamp":1588291200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2020,5,1]],"date-time":"2020-05-01T00:00:00Z","timestamp":1588291200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2020,5,1]],"date-time":"2020-05-01T00:00:00Z","timestamp":1588291200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2020,5]]},"DOI":"10.1109\/icassp40776.2020.9052956","type":"proceedings-article","created":{"date-parts":[[2020,4,9]],"date-time":"2020-04-09T20:21:13Z","timestamp":1586463673000},"page":"3137-3141","source":"Crossref","is-referenced-by-count":3,"title":["On Distributed Stochastic Gradient Descent for Nonconvex Functions in the Presence of Byzantines"],"prefix":"10.1109","author":[{"given":"Saikiran","family":"Bulusu","sequence":"first","affiliation":[]},{"given":"Prashant","family":"Khanduri","sequence":"additional","affiliation":[]},{"given":"Pranay","family":"Sharma","sequence":"additional","affiliation":[]},{"given":"Pramod K.","family":"Varshney","sequence":"additional","affiliation":[]}],"member":"263","reference":[{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1145\/357172.357176"},{"key":"ref11","first-page":"5636","article-title":"Byzantine-robust distributed learning: Towards optimal statistical rates","author":"yin","year":"2018","journal-title":"International Conference on Machine Learning"},{"key":"ref12","first-page":"119","article-title":"Machine learning with adversaries: Byzantine tolerant gradient descent","author":"blanchard","year":"2017","journal-title":"Advances in neural information processing systems"},{"key":"ref13","article-title":"The hidden vulnerability of distributed learning in byzantium","author":"el-mhamdi","year":"2018","journal-title":"International Conference on Machine Learning"},{"article-title":"signsgd with majority vote is communication efficient and byzantine fault tolerant","year":"2018","author":"bernstein","key":"ref14"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v33i01.33011544"},{"key":"ref16","first-page":"4613","article-title":"Byzantine stochastic gradient descent","author":"alistarh","year":"2018","journal-title":"Advances in neural information processing systems"},{"article-title":"Generalized byzantine-tolerant sgd","year":"2018","author":"xie","key":"ref17"},{"key":"ref18","first-page":"7184","article-title":"On the linear speedup analysis of communication efficient momentum sgd for distributed non-convex optimization","author":"yu","year":"2019","journal-title":"International Conference on Machine Learning"},{"key":"ref19","first-page":"2525","article-title":"A linear speedup analysis of distributed deep learning with sparse and quantized communication","author":"jiang","year":"2018","journal-title":"Advances in neural information processing systems"},{"key":"ref4","first-page":"2595","article-title":"Parallelized stochastic gradient descent","author":"zinkevich","year":"2010","journal-title":"Advances in neural information processing systems"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1017\/CBO9781139042918"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1145\/2020408.2020426"},{"key":"ref5","first-page":"5330","article-title":"Can decentralized algorithms outperform centralized algorithms? a case study for decentralized parallel stochastic gradient descent","author":"lian","year":"2017","journal-title":"Advances in neural information processing systems"},{"key":"ref8","first-page":"165","article-title":"Optimal distributed online prediction using minibatches","volume":"13","author":"dekel","year":"2012","journal-title":"Journal of Machine Learning Research"},{"key":"ref7","first-page":"693","article-title":"Hogwild: A lock-free approach to parallelizing stochastic gradient descent","author":"recht","year":"2011","journal-title":"Advances in neural information processing systems"},{"article-title":"Federated learning: Strategies for improving communication efficiency","year":"2016","author":"kone\u010dn\u00fd","key":"ref2"},{"key":"ref1","doi-asserted-by":"crossref","first-page":"436","DOI":"10.1038\/nature14539","article-title":"Deep learning","volume":"521","author":"lecun","year":"2015","journal-title":"Nature"},{"key":"ref9","first-page":"1223","article-title":"More effective distributed ml via a stale synchronous parallel parameter server","author":"ho","year":"2013","journal-title":"Advances in neural information processing systems"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1214\/aop\/1176988477"}],"event":{"name":"ICASSP 2020 - 2020 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)","start":{"date-parts":[[2020,5,4]]},"location":"Barcelona, Spain","end":{"date-parts":[[2020,5,8]]}},"container-title":["ICASSP 2020 - 2020 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/9040208\/9052899\/09052956.pdf?arnumber=9052956","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,6,28]],"date-time":"2022-06-28T00:23:58Z","timestamp":1656375838000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9052956\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2020,5]]},"references-count":20,"URL":"https:\/\/doi.org\/10.1109\/icassp40776.2020.9052956","relation":{},"subject":[],"published":{"date-parts":[[2020,5]]}}}