{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,2,21]],"date-time":"2025-02-21T01:13:32Z","timestamp":1740100412361,"version":"3.37.3"},"reference-count":38,"publisher":"IEEE","license":[{"start":{"date-parts":[[2021,8,23]],"date-time":"2021-08-23T00:00:00Z","timestamp":1629676800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2021,8,23]],"date-time":"2021-08-23T00:00:00Z","timestamp":1629676800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/100010663","name":"European Research Council","doi-asserted-by":"publisher","award":["ERC-2019-STG-850925"],"award-info":[{"award-number":["ERC-2019-STG-850925"]}],"id":[{"id":"10.13039\/100010663","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2021,8,23]]},"DOI":"10.23919\/eusipco54536.2021.9616050","type":"proceedings-article","created":{"date-parts":[[2021,12,8]],"date-time":"2021-12-08T21:55:53Z","timestamp":1639000553000},"page":"2069-2073","source":"Crossref","is-referenced-by-count":1,"title":["Stochastic Majorize-Minimize Subspace Algorithm with Application to Binary Classification"],"prefix":"10.23919","author":[{"given":"Jean-Baptiste","family":"Fest","sequence":"first","affiliation":[]},{"given":"Emilie","family":"Chouzenoux","sequence":"additional","affiliation":[]}],"member":"263",
"reference":[{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1007\/s10994-007-5022-x"},
{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.1007\/s00245-019-09617-7"},
{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.1016\/B978-0-12-604550-5.50015-8"},
{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1214\/aoms\/1177729586"},
{"key":"ref30","article-title":"Seboost-boosting stochastic learning using subspace optimization techniques","author":"richardson","year":"2016","journal-title":"ArXiv Preprint"},
{"key":"ref37","article-title":"RMSProp: Divide the gradient by a running average of its recent magnitude","volume":"6","author":"tieleman","year":"2012","journal-title":"COURSERA Neural Networks for Machine Learning"},
{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1109\/TSP.2016.2601299"},
{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1137\/0910004"},
{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.1007\/s11063-017-9603-9"},
{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1109\/LSP.2016.2593589"},
{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/TSP.2017.2709265"},
{"key":"ref12","first-page":"13","article-title":"Stochastic approximations and perturbations in forward-backward splitting for monotone operators","volume":"1","author":"combettes","year":"2016","journal-title":"Pure and Applied Functional Analysis"},
{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1214\/aos\/1018031103"},
{"key":"ref14","article-title":"Adaptive subgradient methods for online learning and stochastic optimization","volume":"12","author":"duchi","year":"2011","journal-title":"Journal of Machine Learning Research"},
{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.23919\/EUSIPCO.2017.8081215"},
{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/42.802758"},
{"key":"ref17","first-page":"24","article-title":"The method of stochastic gradients and its application","author":"ermoliev","year":"1967","journal-title":"Seminar Theory of Optimal Solutions No 1 (Russian)"},
{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2007.909318"},
{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1016\/j.sigpro.2013.09.026"},
{"key":"ref4","first-page":"1737","article-title":"SGD-QN: Careful quasi-Newton stochastic gradient descent","volume":"10","author":"bordes","year":"2009","journal-title":"Journal on Machine Learning Research"},
{"key":"ref28","first-page":"543","article-title":"A method for solving the convex programming problem with convergence rate O (1\/k2)","volume":"269","author":"nesterov","year":"1983","journal-title":"Dokl Akad Nauk SSSR"},
{"key":"ref27","article-title":"Stochastic majorization-minimization algorithms for large-scale optimization","author":"mairal","year":"2013","journal-title":"ArXiv Preprint"},
{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1137\/S1052623497331063"},
{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1137\/140954362"},
{"key":"ref5","article-title":"Efficient bounds for the softmax function and applications to approximate inference in hybrid models","volume":"31","author":"bouchard","year":"2008","journal-title":"Proceedings of the Neural Information Processing Systems (NIPS 2008)"},
{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1109\/TSP.2019.2955829"},
{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2010.2103083"},
{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1145\/1961189.1961199"},
{"key":"ref2","first-page":"451","article-title":"Non-asymptotic analysis of stochastic approximation algorithms for machine learning","author":"bach","year":"2011","journal-title":"Proceedings of the Annual Conference on Neural Information Processing Systems (NIPS 2011)"},
{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1137\/11085997X"},
{"key":"ref1","first-page":"1","article-title":"On perturbed proximal gradient algorithms","volume":"18","author":"atchad\u00e9","year":"2017","journal-title":"Journal on Machine Learning Research"},
{"key":"ref20","article-title":"Stochastic optimization algorithms, non asymptotic and asymptotic behaviour","author":"gadat","year":"2017","journal-title":"Lecture notes"},
{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1109\/83.392335"},
{"journal-title":"Asymptotic study of stochastic adaptive algorithm in non-convex landscape","year":"2021","author":"gadat","key":"ref21"},
{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1117\/12.2527881"},
{"key":"ref23","first-page":"9633","article-title":"Understanding the role of momentum in stochastic gradient methods","author":"gitman","year":"2019","journal-title":"Advances in neural information processing systems"},
{"key":"ref26","article-title":"Adam: A method for stochastic optimization","author":"kingma","year":"2014","journal-title":"ArXiv Preprint"},
{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1007\/s10915-020-01295-w"}],
"event":{"name":"2021 29th European Signal Processing Conference (EUSIPCO)","start":{"date-parts":[[2021,8,23]]},"location":"Dublin, Ireland","end":{"date-parts":[[2021,8,27]]}},"container-title":["2021 29th European Signal Processing Conference (EUSIPCO)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/9615915\/9615917\/09616050.pdf?arnumber=9616050","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,3,21]],"date-time":"2022-03-21T20:57:23Z","timestamp":1647896243000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9616050\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021,8,23]]},"references-count":38,"URL":"https:\/\/doi.org\/10.23919\/eusipco54536.2021.9616050","relation":{},"subject":[],"published":{"date-parts":[[2021,8,23]]}}}