{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,26]],"date-time":"2026-03-26T14:33:45Z","timestamp":1774535625371,"version":"3.50.1"},"reference-count":50,"publisher":"Springer Science and Business Media LLC","issue":"1","license":[{"start":{"date-parts":[[2025,7,17]],"date-time":"2025-07-17T00:00:00Z","timestamp":1752710400000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2025,7,17]],"date-time":"2025-07-17T00:00:00Z","timestamp":1752710400000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"funder":[{"name":"the NSF of Chongqing","award":["CSTB2024NSCQ-MSX1282"],"award-info":[{"award-number":["CSTB2024NSCQ-MSX1282"]}]}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["J Optim Theory Appl"],"published-print":{"date-parts":[[2025,10]]},"DOI":"10.1007\/s10957-025-02771-9","type":"journal-article","created":{"date-parts":[[2025,7,17]],"date-time":"2025-07-17T04:24:20Z","timestamp":1752726260000},"update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":2,"title":["Non-Asymptotic Analysis of Hybrid SPG for Non-Convex Stochastic Composite Optimization"],"prefix":"10.1007","volume":"207","author":[{"given":"Yue-Hong","family":"He","sequence":"first","affiliation":[]},{"given":"Gao-Xi","family":"Li","sequence":"additional","affiliation":[]},{"given":"Xian-Jun","family":"Long","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2025,7,17]]},"reference":[{"issue":"1","key":"2771_CR1","doi-asserted-by":"publisher","first-page":"91","DOI":"10.1007\/s10107-011-0484-9","volume":"137","author":"H Attouch","year":"2013","unstructured":"Attouch, H., Bolte, J., Svaiter, B.F.: Convergence of descent methods for semi-algebraic and tame problems: proximal algorithms, forward-backward splitting, and regularized Gauss-Seidel methods. Math. Program. 137(1), 91\u2013129 (2013)","journal-title":"Math. Program."},{"issue":"9","key":"2771_CR2","first-page":"142","volume":"17","author":"L Bottou","year":"1998","unstructured":"Bottou, L.: Online learning and stochastic approximations. Online Learning in Neural Networks, Cambridge University Press, Cambridge. 17(9), 142 (1998)","journal-title":"Online Learning in Neural Networks, Cambridge University Press, Cambridge."},{"key":"2771_CR3","doi-asserted-by":"crossref","unstructured":"Bottou, L.: Large-scale machine learning with stochastic gradient descent. In Proceedings of COMPSTAT: 19th International Conference on Computational Statistics, Paris France, pp. 177-186 (2010)","DOI":"10.1007\/978-3-7908-2604-3_16"},{"key":"2771_CR4","unstructured":"Cutkosky, A., Orabona, F.: Momentum-based variance reduction in non-convex SGD. Advances in Neural Information Processing Systems, pp. 15236-15245 (2019)"},{"issue":"2","key":"2771_CR5","doi-asserted-by":"publisher","first-page":"42","DOI":"10.1007\/s10915-022-01897-6","volume":"92","author":"W Cheng","year":"2022","unstructured":"Cheng, W., Wang, X., Chen, X.: An interior stochastic gradient method for a class of non-Lipschitz optimization problems. J. Sci. Comput. 92(2), 42 (2022)","journal-title":"J. Sci. Comput."},{"issue":"1","key":"2771_CR6","doi-asserted-by":"publisher","first-page":"31","DOI":"10.1016\/j.jvcir.2012.10.006","volume":"24","author":"W Cao","year":"2013","unstructured":"Cao, W., Sun, J., Xu, Z.: Fast image deconvolution using closed-form thresholding formulas of $$l_q$$ ($$q= 1\/2, 2\/3$$) regularization. J. Vis. Commun. Image Represent. 24(1), 31\u201341 (2013)","journal-title":"J. Vis. Commun. Image Represent."},{"issue":"1","key":"2771_CR7","doi-asserted-by":"publisher","first-page":"207","DOI":"10.1137\/18M1178244","volume":"29","author":"D Davis","year":"2019","unstructured":"Davis, D., Drusvyatskiy, D.: Stochastic model-based minimization of weakly convex functions. SIAM J. Optim. 29(1), 207\u2013239 (2019)","journal-title":"SIAM J. Optim."},{"key":"2771_CR8","first-page":"1646","volume":"27","author":"A Defazio","year":"2014","unstructured":"Defazio, A., Bach, F., Lacoste-Julien, S.: SAGA: A fast incremental gradient method with support for non-strongly convex composite objectives. Adv. Neural. Inf. Process. Syst. 27, 1646\u20131654 (2014)","journal-title":"Adv. Neural. Inf. Process. Syst."},{"issue":"456","key":"2771_CR9","doi-asserted-by":"publisher","first-page":"1348","DOI":"10.1198\/016214501753382273","volume":"96","author":"JQ Fan","year":"2001","unstructured":"Fan, J.Q., Li, R.: Variable selection via nonconcave penalized likelihood and its oracle properties. J. Am. Stat. Assoc. 96(456), 1348\u20131360 (2001)","journal-title":"J. Am. Stat. Assoc."},{"key":"2771_CR10","unstructured":"Fang, C., Li, C.J., Lin, Z., Zhang, T.: SPIDER: near-optimal non-convex optimization via stochastic path integrated differential estimator. Advances in Neural Information Processing Systems, pp. 687-697 (2018)"},{"issue":"3","key":"2771_CR11","doi-asserted-by":"publisher","first-page":"65","DOI":"10.1007\/s11222-023-10230-6","volume":"33","author":"G Fort","year":"2023","unstructured":"Fort, G., Moulines, E.: Stochastic variable metric proximal gradient with variance reduction for non-convex composite optimization. Stat. Comput. 33(3), 65 (2023)","journal-title":"Stat. Comput."},{"key":"2771_CR12","volume-title":"Deep Learning","author":"I Goodfellow","year":"2016","unstructured":"Goodfellow, I., Bengio, Y., Courville, A.: Deep Learning. MIT Press, Cambridge (2016)"},{"issue":"1\u20132","key":"2771_CR13","doi-asserted-by":"publisher","first-page":"59","DOI":"10.1007\/s10107-015-0871-8","volume":"156","author":"S Ghadimi","year":"2016","unstructured":"Ghadimi, S., Lan, G.: Accelerated gradient methods for nonconvex nonlinear and stochastic programming. Math. Program. 156(1\u20132), 59\u201399 (2016)","journal-title":"Math. Program."},{"issue":"4","key":"2771_CR14","doi-asserted-by":"publisher","first-page":"2341","DOI":"10.1137\/120880811","volume":"23","author":"S Ghadimi","year":"2013","unstructured":"Ghadimi, S., Lan, G.: Stochastic first and zeroth-order methods for nonconvex stochastic programming. SIAM J. Optim. 23(4), 2341\u20132368 (2013)","journal-title":"SIAM J. Optim."},{"key":"2771_CR15","doi-asserted-by":"publisher","first-page":"771","DOI":"10.1016\/j.ins.2022.11.133","volume":"622","author":"D Ghosh","year":"2023","unstructured":"Ghosh, D., Mesiar, R., Yao, H.R., Chauhan, R.S.: Generalized-Hukuhara subdifferential analysis and its application in nonconvex composite interval optimization problems. Inform. Sci. 622, 771\u2013793 (2023)","journal-title":"Inform. Sci."},{"key":"2771_CR16","unstructured":"Guo, J.H., Wang, X., Xiao, X.T.: Preconditioned primal-dual gradient methods for nonconvex composite and finite-sum optimization. arXiv:2309.13416 (2023)"},{"key":"2771_CR17","doi-asserted-by":"publisher","first-page":"119546","DOI":"10.1016\/j.ins.2023.119546","volume":"648","author":"L He","year":"2023","unstructured":"He, L., Ye, J., Jianwei, E.: Nonconvex optimization with inertial proximal stochastic variance reduction gradient. Inform. Sci. 648, 119546 (2023)","journal-title":"Inform. Sci."},{"issue":"4","key":"2771_CR18","first-page":"713","volume":"18","author":"YH He","year":"2022","unstructured":"He, Y.H., Long, X.J.: A variance-based proximal backward-forward algorithm with line search for stochastic mixed variational inequalities. Pac. J. Optim. 18(4), 713\u2013735 (2022)","journal-title":"Pac. J. Optim."},{"key":"2771_CR19","first-page":"315","volume":"26","author":"R Johnson","year":"2013","unstructured":"Johnson, R., Zhang, T.: Accelerating stochastic gradient descent using predictive variance reduction. Adv. Neural. Inf. Process. Syst. 26, 315\u2013323 (2013)","journal-title":"Adv. Neural. Inf. Process. Syst."},{"key":"2771_CR20","first-page":"5564","volume":"31","author":"Z Li","year":"2018","unstructured":"Li, Z., Li, J.: A simple proximal stochastic gradient method for nonsmooth nonconvex optimization. Adv. Neural. Inf. Process. Syst. 31, 5564\u20135574 (2018)","journal-title":"Adv. Neural. Inf. Process. Syst."},{"key":"2771_CR21","unstructured":"Li, Q., Zhou, Y., Liang, Y., Varshney, P.K.: Convergence analysis of proximal gradient with momentum for nonconvex optimization. In: 34th International Conference on Machine Learning, ICML. 70, 3341-3357 (2017)"},{"key":"2771_CR22","doi-asserted-by":"publisher","first-page":"114786","DOI":"10.1016\/j.cam.2022.114786","volume":"420","author":"XJ Long","year":"2023","unstructured":"Long, X.J., He, Y.H.: A fast stochastic approximation-based subgradient extragradient algorithm with variance reduction for solving stochastic variational inequality problems. J. Comput. Appl. Math. 420, 114786 (2023)","journal-title":"J. Comput. Appl. Math."},{"key":"2771_CR23","doi-asserted-by":"publisher","first-page":"116381","DOI":"10.1016\/j.cam.2024.116381","volume":"459","author":"XJ Long","year":"2025","unstructured":"Long, X.J., Yang, J.: A stochastic Bregman golden ratio algorithm for non-Lipschitz stochastic mixed variational inequalities with application to resource share problems. J. Comput. Appl. Math. 459, 116381 (2025)","journal-title":"J. Comput. Appl. Math."},{"issue":"6","key":"2771_CR24","doi-asserted-by":"publisher","first-page":"1321","DOI":"10.1080\/02331934.2024.2312198","volume":"74","author":"XJ Long","year":"2025","unstructured":"Long, X.J., Yang, J., Yang, Z.P.: Stochastic Bregman extragradient algorithm with line search for stochastic mixed variational inequalities. Optimization 74(6), 1321\u20131353 (2025)","journal-title":"Optimization"},{"issue":"2","key":"2771_CR25","doi-asserted-by":"publisher","first-page":"669","DOI":"10.1007\/s10589-023-00504-0","volume":"86","author":"GH Lin","year":"2023","unstructured":"Lin, G.H., Yang, Z.P., Yin, H.A., Zhang, J.: A dual-based stochastic inexact algorithm for a class of stochastic nonsmooth convex composite problems. Comput. Optim. Appl. 86(2), 669\u2013710 (2023)","journal-title":"Comput. Optim. Appl."},{"key":"2771_CR26","unstructured":"Metel, M., Takeda, A.: Simple stochastic gradient methods for non-smooth non-convex regularized optimization. In: International Conference on Machine Learning, pp. 4537-4545 (2019)"},{"issue":"3","key":"2771_CR27","doi-asserted-by":"publisher","first-page":"561","DOI":"10.1007\/s10589-022-00375-x","volume":"82","author":"LM Nguyen","year":"2022","unstructured":"Nguyen, L.M., van Dijk, M., Phan, D.T., Nguyen, P.H., Weng, T.W., Kalagnanam, J.R.: Finite-sum smooth optimization with SARAH. Comput. Optim. Appl. 82(3), 561\u2013593 (2022)","journal-title":"Comput. Optim. Appl."},{"key":"2771_CR28","unstructured":"Nguyen, L.M., Liu, J., Scheinberg, K., Takac, M.: SARAH: A novel method for machine learning problems using stochastic recursive gradient. In: International Conference on Machine Learning, pp. 2613-2621 (2017)"},{"issue":"1","key":"2771_CR29","doi-asserted-by":"publisher","first-page":"237","DOI":"10.1080\/10556788.2020.1818081","volume":"36","author":"LM Nguyen","year":"2021","unstructured":"Nguyen, L.M., Scheinberg, K., Takac, M.: Inexact SARAH algorithm for stochastic optimization. Optim. Methods Softw. 36(1), 237\u2013258 (2021)","journal-title":"Optim. Methods Softw."},{"issue":"1","key":"2771_CR30","first-page":"4455","volume":"21","author":"HN Pham","year":"2020","unstructured":"Pham, H.N., Nguyen, M.L., Phan, T.D., Tran-Dinh, Q.: ProxSARAH: an efficient algorithmic framework for stochastic composite nonconvex optimization. J. Mach. Learn. Res. 21(1), 4455\u20134502 (2020)","journal-title":"J. Mach. Learn. Res."},{"issue":"3","key":"2771_CR31","doi-asserted-by":"publisher","first-page":"400","DOI":"10.1214\/aoms\/1177729586","volume":"22","author":"H Robbins","year":"1951","unstructured":"Robbins, H., Monro, S.: A stochastic approximation method. Ann. Statist. 22(3), 400\u2013407 (1951)","journal-title":"Ann. Statist."},{"key":"2771_CR32","unstructured":"Reddi, S.J., Sra, S., P\u00f3czos, B., Smola, A.J.: Proximal stochastic methods for nonsmooth nonconvex finite-sum optimization. Advances in Neural Information Processing Systems, pp. 1145-1153 (2016)"},{"key":"2771_CR33","volume-title":"Variational Analysis","author":"RT Rockafellar","year":"2009","unstructured":"Rockafellar, R.T., Wets, R.J.B.: Variational Analysis. Springer, Berlin (2009)"},{"key":"2771_CR34","doi-asserted-by":"publisher","DOI":"10.7551\/mitpress\/8996.001.0001","volume-title":"Optimization for Machine Learning","author":"S Sra","year":"2011","unstructured":"Sra, S., Nowozin, S., Wright, S.J.: Optimization for Machine Learning. MIT Press, Cambridge (2011)"},{"issue":"2","key":"2771_CR35","doi-asserted-by":"publisher","first-page":"1005","DOI":"10.1007\/s10107-020-01583-1","volume":"191","author":"Q Tran-Dinh","year":"2022","unstructured":"Tran-Dinh, Q., Pham, N.H., Phan, D.T., Nguyen, L.M.: A hybrid stochastic optimization framework for composite nonconvex optimization. Math. Program. 191(2), 1005\u20131071 (2022)","journal-title":"Math. Program."},{"key":"2771_CR36","first-page":"11096","volume":"33","author":"Q Tran Dinh","year":"2020","unstructured":"Tran Dinh, Q., Liu, D., Nguyen, L.: Hybrid variance-reduced SGD algorithms for minimax problems with nonconvex-linear function. Adv. Neural. Inf. Process. Syst. 33, 11096\u201311107 (2020)","journal-title":"Adv. Neural. Inf. Process. Syst."},{"key":"2771_CR37","first-page":"2406","volume":"32","author":"Z Wang","year":"2019","unstructured":"Wang, Z., Ji, K., Zhou, Y., Liang, Y., Tarokh, V.: Spiderboost and momentum: faster variance reduction algorithms. Adv. Neural. Inf. Process. Syst. 32, 2406\u20132416 (2019)","journal-title":"Adv. Neural. Inf. Process. Syst."},{"key":"2771_CR38","doi-asserted-by":"publisher","first-page":"69883","DOI":"10.1109\/ACCESS.2018.2880454","volume":"6","author":"F Wen","year":"2018","unstructured":"Wen, F., Chu, L., Liu, P.L., Qiu, R.C.: A survey on nonconvex regularization-based sparse and low-rank recovery in signal processing, statistics, and machine learning. IEEE Access. 6, 69883\u201369906 (2018)","journal-title":"IEEE Access."},{"key":"2771_CR39","doi-asserted-by":"publisher","first-page":"617","DOI":"10.1007\/s10898-020-00943-7","volume":"79","author":"Z Wu","year":"2021","unstructured":"Wu, Z., Li, C., Li, M., Lim, A.: Inertial proximal gradient methods with Bregman regularization for a class of nonconvex optimization problems. J. Glob. Optim. 79, 617\u2013644 (2021)","journal-title":"J. Glob. Optim."},{"key":"2771_CR40","doi-asserted-by":"publisher","first-page":"1676","DOI":"10.1137\/22M1482822","volume":"33","author":"Y Wu","year":"2023","unstructured":"Wu, Y., Pan, S., Yang, X.: A regularized newton method for $$l_q$$-norm composite optimization problems. SIAM J. Optim. 33, 1676\u20131706 (2023)","journal-title":"SIAM J. Optim."},{"key":"2771_CR41","first-page":"2630","volume":"32","author":"Y Xu","year":"2019","unstructured":"Xu, Y., Jin, R., Yang, T.: Non-asymptotic analysis of stochastic methods for non-smooth non-convex regularized problems. Adv. Neural. Inf. Process. Syst. 32, 2630\u20132640 (2019)","journal-title":"Adv. Neural. Inf. Process. Syst."},{"issue":"1","key":"2771_CR42","doi-asserted-by":"publisher","first-page":"266","DOI":"10.1007\/s10957-022-02132-w","volume":"196","author":"Y Xu","year":"2023","unstructured":"Xu, Y., Xu, Y.: Momentum-based variance-reduced proximal stochastic gradient method for composite nonconvex stochastic optimization. J. Optim. Theory Appl. 196(1), 266\u2013297 (2023)","journal-title":"J. Optim. Theory Appl."},{"key":"2771_CR43","unstructured":"Xu, Y., Qi, Q., Lin, Q., Jin, R., Yang, T.: Stochastic optimization for DC functions and non-smooth non-convex regularizers with non-asymptotic convergence. International Conference on Machine Learning, PMLR, pp. 6942-6951 (2019)"},{"issue":"3","key":"2771_CR44","doi-asserted-by":"publisher","first-page":"605","DOI":"10.1007\/s10957-020-01799-3","volume":"188","author":"X Xiao","year":"2021","unstructured":"Xiao, X.: A unified convergence analysis of stochastic Bregman proximal gradient and extragradient methods. J. Optim. Theory Appl. 188(3), 605\u2013627 (2021)","journal-title":"J. Optim. Theory Appl."},{"issue":"7","key":"2771_CR45","doi-asserted-by":"publisher","first-page":"1013","DOI":"10.1109\/TNNLS.2012.2197412","volume":"23","author":"Z Xu","year":"2012","unstructured":"Xu, Z., Chang, X., Xu, F., Zhang, H.: $$ L_ 1\/2 $$ regularization: A thresholding representation theory and a fast solver. IEEE Trans. Neural Netw. Learn. Syst. 23(7), 1013\u20131027 (2012)","journal-title":"IEEE Trans. Neural Netw. Learn. Syst."},{"key":"2771_CR46","doi-asserted-by":"publisher","first-page":"115425","DOI":"10.1016\/j.cam.2023.115425","volume":"436","author":"ZP Yang","year":"2024","unstructured":"Yang, Z.P., Zhao, Y.: Hybrid SGD algorithms to solve stochastic composite optimization problems. J. Comput. Appl. Math. 436, 115425 (2024)","journal-title":"J. Comput. Appl. Math."},{"key":"2771_CR47","doi-asserted-by":"publisher","first-page":"894","DOI":"10.1214\/09-AOS729","volume":"38","author":"CH Zhang","year":"2010","unstructured":"Zhang, C.H.: Nearly unbiased variable selection under minimax concave penalty. Ann. Statist. 38, 894\u2013942 (2010)","journal-title":"Ann. Statist."},{"key":"2771_CR48","first-page":"1","volume":"27","author":"X Zhao","year":"2024","unstructured":"Zhao, X., Ghosh, D., Qin, X., Tammer, C., Yao, J.C.: On the convergence analysis of a proximal gradient method for multiobjective optimization. TOP 27, 1\u201331 (2024)","journal-title":"TOP"},{"issue":"3","key":"2771_CR49","doi-asserted-by":"publisher","first-page":"43","DOI":"10.1007\/s11117-024-01057-0","volume":"28","author":"X Zhao","year":"2024","unstructured":"Zhao, X., Ji, H., Ghosh, D., Yao, J.C.: An inexact proximal point method with quasi-distance for quasi-convex multiobjective optimization. Positivity 28(3), 43 (2024)","journal-title":"Positivity"},{"key":"2771_CR50","first-page":"1","volume":"10","author":"X Zhao","year":"2025","unstructured":"Zhao, X., Raushan, R., Ghosh, D., Yao, J.C., Qi, M.: Proximal gradient method for convex multiobjective optimization problems without Lipschitz continuous gradients. Comput. Optim. Appl. 10, 1\u201340 (2025)","journal-title":"Comput. Optim. Appl."}],"container-title":["Journal of Optimization Theory and Applications"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s10957-025-02771-9.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s10957-025-02771-9\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s10957-025-02771-9.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,9,7]],"date-time":"2025-09-07T15:26:31Z","timestamp":1757258791000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s10957-025-02771-9"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,7,17]]},"references-count":50,"journal-issue":{"issue":"1","published-print":{"date-parts":[[2025,10]]}},"alternative-id":["2771"],"URL":"https:\/\/doi.org\/10.1007\/s10957-025-02771-9","relation":{},"ISSN":["0022-3239","1573-2878"],"issn-type":[{"value":"0022-3239","type":"print"},{"value":"1573-2878","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025,7,17]]},"assertion":[{"value":"20 February 2025","order":1,"name":"received","label":"Received","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"24 June 2025","order":2,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"17 July 2025","order":3,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}}],"article-number":"19"}}