{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,5,5]],"date-time":"2026-05-05T07:55:36Z","timestamp":1777967736777,"version":"3.51.4"},"publisher-location":"Cham","reference-count":59,"publisher":"Springer International Publishing","isbn-type":[{"value":"9783030910587","type":"print"},{"value":"9783030910594","type":"electronic"}],"license":[{"start":{"date-parts":[[2021,1,1]],"date-time":"2021-01-01T00:00:00Z","timestamp":1609459200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springer.com\/tdm"},{"start":{"date-parts":[[2021,1,1]],"date-time":"2021-01-01T00:00:00Z","timestamp":1609459200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springer.com\/tdm"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2021]]},"DOI":"10.1007\/978-3-030-91059-4_2","type":"book-chapter","created":{"date-parts":[[2021,11,4]],"date-time":"2021-11-04T15:02:57Z","timestamp":1636038177000},"page":"20-37","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":2,"title":["Adaptive Catalyst for Smooth Convex Optimization"],"prefix":"10.1007","author":[{"given":"Anastasiya","family":"Ivanova","sequence":"first","affiliation":[]},{"given":"Dmitry","family":"Pasechnyuk","sequence":"additional","affiliation":[]},{"given":"Dmitry","family":"Grishchenko","sequence":"additional","affiliation":[]},{"given":"Egor","family":"Shulgin","sequence":"additional","affiliation":[]},{"given":"Alexander","family":"Gasnikov","sequence":"additional","affiliation":[]},{"given":"Vladislav","family":"Matyukhin","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2021,11,5]]},"reference":[{"key":"2_CR1","unstructured":"Allen-Zhu, Z., Hazan, E.: Optimal black-box reductions between optimization objectives. arXiv preprint arXiv:1603.05642 (2016)"},{"key":"2_CR2","doi-asserted-by":"crossref","unstructured":"Bayandina, A., Gasnikov, A., Lagunovskaya, A.: Gradient-free two-points optimal method for non smooth stochastic convex optimization problem with additional small noise. Autom. Rem. Contr. 79(7) (2018). arXiv:1701.03821","DOI":"10.1134\/S0005117918080039"},{"key":"2_CR3","doi-asserted-by":"crossref","unstructured":"Beck, A.: First-order methods in optimization, vol. 25. SIAM (2017)","DOI":"10.1137\/1.9781611974997"},{"key":"2_CR4","doi-asserted-by":"crossref","unstructured":"Bubeck, S.: Convex optimization: algorithms and complexity. Found. Trends\u00ae Mach. Learn. 8(3\u20134), 231\u2013357 (2015)","DOI":"10.1561\/2200000050"},{"issue":"7","key":"2_CR5","doi-asserted-by":"publisher","first-page":"1185","DOI":"10.1007\/s11590-016-1087-4","volume":"11","author":"E De Klerk","year":"2017","unstructured":"De Klerk, E., Glineur, F., Taylor, A.B.: On the worst-case complexity of the gradient method with exact line search for smooth strongly convex functions. Optim. Lett. 11(7), 1185\u20131199 (2017)","journal-title":"Optim. Lett."},{"key":"2_CR6","unstructured":"Diakonikolas, J., Orecchia, L.: Alternating randomized block coordinate descent. arXiv preprint arXiv:1805.09185 (2018)"},{"key":"2_CR7","unstructured":"Diakonikolas, J., Orecchia, L.: Conjugate gradients and accelerated methods unified: the approximate duality gap view. arXiv preprint arXiv:1907.00289 (2019)"},{"issue":"4","key":"2_CR8","doi-asserted-by":"publisher","first-page":"3146","DOI":"10.1137\/19M130769X","volume":"30","author":"N Doikov","year":"2020","unstructured":"Doikov, N., Nesterov, Y.: Contracting proximal methods for smooth convex optimization. SIAM J. Optim. 30(4), 3146\u20133169 (2020)","journal-title":"SIAM J. Optim."},{"key":"2_CR9","unstructured":"Doikov, N., Nesterov, Y.: Inexact tensor methods with dynamic accuracies. arXiv preprint arXiv:2002.09403 (2020)"},{"issue":"5","key":"2_CR10","doi-asserted-by":"publisher","first-page":"2788","DOI":"10.1109\/TIT.2015.2409256","volume":"61","author":"JC Duchi","year":"2015","unstructured":"Duchi, J.C., Jordan, M.I., Wainwright, M.J., Wibisono, A.: Optimal rates for zero-order convex optimization: the power of two function evaluations. IEEE Trans. Inf. Theory 61(5), 2788\u20132806 (2015)","journal-title":"IEEE Trans. Inf. Theory"},{"key":"2_CR11","doi-asserted-by":"crossref","unstructured":"Dvinskikh, D., et al.: Accelerated meta-algorithm for convex optimization. Comput. Math. Math. Phys. 61(1), 17\u201328 (2021)","DOI":"10.1134\/S096554252101005X"},{"key":"2_CR12","doi-asserted-by":"publisher","first-page":"244","DOI":"10.1134\/S1064562420030084","volume":"101","author":"D Dvinskikh","year":"2020","unstructured":"Dvinskikh, D., Omelchenko, S., Gasnikov, A., Tyurin, A.: Accelerated gradient sliding for minimizing a sum of functions. Doklady Math. 101, 244\u2013246 (2020)","journal-title":"Doklady Math."},{"key":"2_CR13","unstructured":"Dvurechensky, P., Gasnikov, A., Gorbunov, E.: An accelerated directional derivative method for smooth stochastic convex optimization. arXiv:1804.02394 (2018)"},{"key":"2_CR14","unstructured":"Dvurechensky, P., Gasnikov, A., Gorbunov, E.: An accelerated method for derivative-free smooth stochastic convex optimization. arXiv:1802.09022 (2018)"},{"issue":"4","key":"2_CR15","doi-asserted-by":"publisher","first-page":"1997","DOI":"10.1137\/130949993","volume":"25","author":"O Fercoq","year":"2015","unstructured":"Fercoq, O., Richt\u00e1rik, P.: Accelerated, parallel, and proximal coordinate descent. SIAM J. Optim. 25(4), 1997\u20132023 (2015)","journal-title":"SIAM J. Optim."},{"key":"2_CR16","volume-title":"Universal Gradient Descent","author":"A Gasnikov","year":"2021","unstructured":"Gasnikov, A.: Universal Gradient Descent. MCCME, Moscow (2021)"},{"key":"2_CR17","doi-asserted-by":"publisher","unstructured":"Gasnikov, A., Lagunovskaya, A., Usmanova, I., Fedorenko, F.: Gradient-free proximal methods with inexact oracle for convex stochastic nonsmooth optimization problems on the simplex. Autom. Rem. Contr. 77(11), 2018\u20132034 (2016). https:\/\/doi.org\/10.1134\/S0005117916110114. http:\/\/dx.doi.org\/10.1134\/S0005117916110114. arXiv:1412.3890","DOI":"10.1134\/S0005117916110114"},{"key":"2_CR18","unstructured":"Gasnikov, A.: Universal gradient descent. arXiv preprint arXiv:1711.00394 (2017)"},{"key":"2_CR19","unstructured":"Gasnikov, A., et al.: Near optimal methods for minimizing convex functions with lipschitz $$ p $$-th derivatives. In: Conference on Learning Theory, pp. 1392\u20131393 (2019)"},{"key":"2_CR20","unstructured":"Gasnikov, A., Dvurechensky, P., Usmanova, I.: On accelerated randomized methods. Proc. Moscow Inst. Phys. Technol. 8(2), 67\u2013100 (2016). (in Russian), first appeared in arXiv:1508.02182"},{"key":"2_CR21","doi-asserted-by":"crossref","unstructured":"Gasnikov, A., Gorbunov, E., Kovalev, D., Mokhammed, A., Chernousova, E.: Reachability of optimal convergence rate estimates for high-order numerical convex optimization methods. Doklady Math. 99, 91\u201394 (2019)","DOI":"10.1134\/S1064562419010289"},{"key":"2_CR22","unstructured":"Gazagnadou, N., Gower, R.M., Salmon, J.: Optimal mini-batch and step sizes for saga. arXiv preprint arXiv:1902.00071 (2019)"},{"key":"2_CR23","unstructured":"Gorbunov, E., Hanzely, F., Richtarik, P.: A unified theory of SGD: variance reduction, sampling, quantization and coordinate descent (2019)"},{"key":"2_CR24","unstructured":"Gower, R.M., Loizou, N., Qian, X., Sailanbayev, A., Shulgin, E., Richt\u00e1rik, P.: SGD: general analysis and improved rates. arXiv preprint arXiv:1901.09401 (2019)"},{"key":"2_CR25","unstructured":"Guminov, S., Dvurechensky, P., Gasnikov, A.: Accelerated alternating minimization. arXiv preprint arXiv:1906.03622 (2019)"},{"key":"2_CR26","unstructured":"Hendrikx, H., Bach, F., Massouli\u00e9, L.: Dual-free stochastic decentralized optimization with variance reduction. In: Advances in Neural Information Processing Systems, vol. 33 (2020)"},{"key":"2_CR27","unstructured":"Ivanova, A., et al.: Oracle complexity separation in convex optimization. arXiv preprint arXiv:2002.02706 (2020)"},{"key":"2_CR28","unstructured":"Ivanova, A., Pasechnyuk, D., Grishchenko, D., Shulgin, E., Gasnikov, A., Matyukhin, V.: Adaptive catalyst for smooth convex optimization. arXiv preprint arXiv:1911.11271 (2019)"},{"key":"2_CR29","series-title":"Lecture Notes in Computer Science","doi-asserted-by":"publisher","first-page":"166","DOI":"10.1007\/978-3-030-62867-3_13","volume-title":"Optimization and Applications","author":"D Kamzolov","year":"2020","unstructured":"Kamzolov, D., Gasnikov, A., Dvurechensky, P.: Optimal combination of tensor optimization methods. In: Olenev, N., Evtushenko, Y., Khachay, M., Malkova, V. (eds.) OPTIMA 2020. LNCS, vol. 12422, pp. 166\u2013183. Springer, Cham (2020). https:\/\/doi.org\/10.1007\/978-3-030-62867-3_13"},{"key":"2_CR30","doi-asserted-by":"crossref","unstructured":"Kamzolov, D., Gasnikov, A.: Near-optimal hyperfast second-order method for convex optimization and its sliding. arXiv preprint arXiv:2002.09050 (2020)","DOI":"10.1007\/978-3-030-58657-7_15"},{"key":"2_CR31","series-title":"Lecture Notes in Computer Science (Lecture Notes in Artificial Intelligence)","doi-asserted-by":"publisher","first-page":"795","DOI":"10.1007\/978-3-319-46128-1_50","volume-title":"Machine Learning and Knowledge Discovery in Databases","author":"H Karimi","year":"2016","unstructured":"Karimi, H., Nutini, J., Schmidt, M.: Linear convergence of gradient and proximal-gradient methods under the Polyak-\u0141ojasiewicz condition. In: Frasconi, P., Landwehr, N., Manco, G., Vreeken, J. (eds.) ECML PKDD 2016. LNCS (LNAI), vol. 9851, pp. 795\u2013811. Springer, Cham (2016). https:\/\/doi.org\/10.1007\/978-3-319-46128-1_50"},{"key":"2_CR32","unstructured":"Karimireddy, S.P., Kale, S., Mohri, M., Reddi, S.J., Stich, S.U., Suresh, A.T.: Scaffold: stochastic controlled averaging for federated learning. arXiv preprint arXiv:1910.06378 (2019)"},{"key":"2_CR33","unstructured":"Kovalev, D., Salim, A., Richt\u00e1rik, P.: Optimal and practical algorithms for smooth and strongly convex decentralized optimization. In: Advances in Neural Information Processing Systems, vol. 33 (2020)"},{"key":"2_CR34","unstructured":"Kulunchakov, A., Mairal, J.: A generic acceleration framework for stochastic composite optimization. arXiv preprint arXiv:1906.01164 (2019)"},{"key":"2_CR35","doi-asserted-by":"crossref","unstructured":"Li, H., Lin, Z.: Revisiting extra for smooth distributed optimization. arXiv preprint arXiv:2002.10110 (2020)","DOI":"10.1137\/18M122902X"},{"key":"2_CR36","unstructured":"Li, H., Lin, Z., Fang, Y.: Optimal accelerated variance reduced extra and diging for strongly convex and smooth decentralized optimization. arXiv preprint arXiv:2009.04373 (2020)"},{"key":"2_CR37","unstructured":"Lin, H., Mairal, J., Harchaoui, Z.: A universal catalyst for first-order optimization. In: Advances in Neural Information Processing Systems, pp. 3384\u20133392 (2015)"},{"key":"2_CR38","unstructured":"Lin, H., Mairal, J., Harchaoui, Z.: Catalyst acceleration for first-order convex optimization: from theory to practice. arXiv preprint arXiv:1712.05654 (2018)"},{"key":"2_CR39","unstructured":"Lin, T., Jin, C., Jordan, M.: On gradient descent ascent for nonconvex-concave minimax problems. In: International Conference on Machine Learning, pp. 6083\u20136093. PMLR (2020)"},{"key":"2_CR40","unstructured":"Mishchenko, K., Iutzeler, F., Malick, J., Amini, M.R.: A delay-tolerant proximal-gradient algorithm for distributed learning. In: International Conference on Machine Learning, pp. 3587\u20133595 (2018)"},{"issue":"2","key":"2_CR41","doi-asserted-by":"publisher","first-page":"1092","DOI":"10.1137\/110833786","volume":"23","author":"RD Monteiro","year":"2013","unstructured":"Monteiro, R.D., Svaiter, B.F.: An accelerated hybrid proximal extragradient method for convex optimization and its implications to second-order methods. SIAM J. Optim. 23(2), 1092\u20131125 (2013)","journal-title":"SIAM J. Optim."},{"issue":"2","key":"2_CR42","doi-asserted-by":"publisher","first-page":"341","DOI":"10.1137\/100802001","volume":"22","author":"Y Nesterov","year":"2012","unstructured":"Nesterov, Y.: Efficiency of coordinate descent methods on huge-scale optimization problems. SIAM J. Optim. 22(2), 341\u2013362 (2012)","journal-title":"SIAM J. Optim."},{"key":"2_CR43","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-319-91578-4","volume-title":"Lectures on Convex Optimization","author":"Y Nesterov","year":"2018","unstructured":"Nesterov, Y.: Lectures on Convex Optimization, vol. 137. Springer, Cham (2018). https:\/\/doi.org\/10.1007\/978-3-319-91578-4"},{"key":"2_CR44","unstructured":"Nesterov, Y., Gasnikov, A., Guminov, S., Dvurechensky, P.: Primal-dual accelerated gradient descent with line search for convex and nonconvex optimization problems. arXiv preprint arXiv:1809.05895 (2018)"},{"issue":"1","key":"2_CR45","doi-asserted-by":"publisher","first-page":"110","DOI":"10.1137\/16M1060182","volume":"27","author":"Y Nesterov","year":"2017","unstructured":"Nesterov, Y., Stich, S.U.: Efficiency of the accelerated coordinate descent method on structured optimization problems. SIAM J. Optim. 27(1), 110\u2013123 (2017)","journal-title":"SIAM J. Optim."},{"key":"2_CR46","unstructured":"Palaniappan, B., Bach, F.: Stochastic variance reduction methods for saddle-point problems. In: Advances in Neural Information Processing Systems, pp. 1416\u20131424 (2016)"},{"key":"2_CR47","unstructured":"Paquette, C., Lin, H., Drusvyatskiy, D., Mairal, J., Harchaoui, Z.: Catalyst acceleration for gradient-based non-convex optimization. arXiv preprint arXiv:1703.10993 (2017)"},{"key":"2_CR48","doi-asserted-by":"crossref","unstructured":"Parikh, N., Boyd, S., et al.: Proximal algorithms. Found. Trends\u00ae Optim. 1(3), 127\u2013239 (2014)","DOI":"10.1561\/2400000003"},{"key":"2_CR49","unstructured":"Pasechnyuk, D., Anikin, A., Matyukhin, V.: Accelerated proximal envelopes: application to the coordinate descent method. arXiv preprint arXiv:2101.04706 (2021)"},{"key":"2_CR50","unstructured":"Polyak, B.T.: Introduction to optimization. Optimization Software (1987)"},{"issue":"5","key":"2_CR51","doi-asserted-by":"publisher","first-page":"877","DOI":"10.1137\/0314056","volume":"14","author":"RT Rockafellar","year":"1976","unstructured":"Rockafellar, R.T.: Monotone operators and the proximal point algorithm. SIAM J. Control. Optim. 14(5), 877\u2013898 (1976)","journal-title":"SIAM J. Control. Optim."},{"key":"2_CR52","unstructured":"Shalev-Shwartz, S., Zhang, T.: Accelerated proximal stochastic dual coordinate ascent for regularized loss minimization. In: International Conference on Machine Learning, pp. 64\u201372 (2014)"},{"key":"2_CR53","unstructured":"Shamir, O.: An optimal algorithm for bandit and zero-order convex optimization with two-point feedback. J. Mach. Learn. Res. 18, 52:1\u201352:11 (2017)"},{"key":"2_CR54","unstructured":"Tupitsa, N.: Accelerated alternating minimization and adaptability to strong convexity. arXiv preprint arXiv:2006.09097 (2020)"},{"key":"2_CR55","unstructured":"Tupitsa, N., Dvurechensky, P., Gasnikov, A.: Alternating minimization methods for strongly convex optimization. arXiv preprint arXiv:1911.08987 (2019)"},{"key":"2_CR56","unstructured":"Wilson, A.C., Mackey, L., Wibisono, A.: Accelerating rescaled gradient descent: Fast optimization of smooth functions. In: Advances in Neural Information Processing Systems, pp. 13533\u201313543 (2019)"},{"key":"2_CR57","unstructured":"Woodworth, B., et al.: Is local SGD better than minibatch SGD? arXiv preprint arXiv:2002.07839 (2020)"},{"issue":"1","key":"2_CR58","doi-asserted-by":"publisher","first-page":"3","DOI":"10.1007\/s10107-015-0892-3","volume":"151","author":"SJ Wright","year":"2015","unstructured":"Wright, S.J.: Coordinate descent algorithms. Math. Program. 151(1), 3\u201334 (2015)","journal-title":"Math. Program."},{"key":"2_CR59","unstructured":"Yang, J., Zhang, S., Kiyavash, N., He, N.: A catalyst framework for minimax optimization. In: Advances in Neural Information Processing Systems, vol. 33 (2020)"}],"container-title":["Lecture Notes in Computer Science","Optimization and Applications"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/978-3-030-91059-4_2","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2021,12,1]],"date-time":"2021-12-01T23:03:24Z","timestamp":1638399804000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/978-3-030-91059-4_2"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021]]},"ISBN":["9783030910587","9783030910594"],"references-count":59,"URL":"https:\/\/doi.org\/10.1007\/978-3-030-91059-4_2","relation":{},"ISSN":["0302-9743","1611-3349"],"issn-type":[{"value":"0302-9743","type":"print"},{"value":"1611-3349","type":"electronic"}],"subject":[],"published":{"date-parts":[[2021]]},"assertion":[{"value":"5 November 2021","order":1,"name":"first_online","label":"First Online","group":{"name":"ChapterHistory","label":"Chapter History"}},{"value":"OPTIMA","order":1,"name":"conference_acronym","label":"Conference Acronym","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"International Conference on Optimization and Applications","order":2,"name":"conference_name","label":"Conference Name","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Petrovac","order":3,"name":"conference_city","label":"Conference City","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Montenegro","order":4,"name":"conference_country","label":"Conference Country","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"2021","order":5,"name":"conference_year","label":"Conference Year","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"27 September 2021","order":7,"name":"conference_start_date","label":"Conference Start Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"1 October 2021","order":8,"name":"conference_end_date","label":"Conference End Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"12","order":9,"name":"conference_number","label":"Conference Number","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"optima2021","order":10,"name":"conference_id","label":"Conference ID","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"http:\/\/agora.guru.ru\/display.php?conf=OPTIMA-2021","order":11,"name":"conference_url","label":"Conference URL","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Single-blind","order":1,"name":"type","label":"Type","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"EasyChair","order":2,"name":"conference_management_system","label":"Conference Management System","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"63","order":3,"name":"number_of_submissions_sent_for_review","label":"Number of Submissions Sent for Review","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"41","order":4,"name":"number_of_full_papers_accepted","label":"Number of Full Papers Accepted","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"3","order":5,"name":"number_of_short_papers_accepted","label":"Number of Short Papers Accepted","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"65% - The value is computed by the equation \"Number of Full Papers Accepted \/ Number of Submissions Sent for Review * 100\" and then rounded to a whole number.","order":6,"name":"acceptance_rate_of_full_papers","label":"Acceptance Rate of Full Papers","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"3.1","order":7,"name":"average_number_of_reviews_per_paper","label":"Average Number of Reviews per Paper","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"2.5","order":8,"name":"average_number_of_papers_per_reviewer","label":"Average Number of Papers per Reviewer","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"Yes","order":9,"name":"external_reviewers_involved","label":"External Reviewers Involved","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}}]}}