{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,10]],"date-time":"2026-03-10T18:29:07Z","timestamp":1773167347624,"version":"3.50.1"},"reference-count":61,"publisher":"Springer Science and Business Media LLC","issue":"5","license":[{"start":{"date-parts":[[2025,3,10]],"date-time":"2025-03-10T00:00:00Z","timestamp":1741564800000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2025,3,10]],"date-time":"2025-03-10T00:00:00Z","timestamp":1741564800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["Mach Learn"],"published-print":{"date-parts":[[2025,5]]},"DOI":"10.1007\/s10994-024-06645-5","type":"journal-article","created":{"date-parts":[[2025,3,10]],"date-time":"2025-03-10T15:13:11Z","timestamp":1741619591000},"update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":0,"title":["Minimum discrepancy principle strategy for choosing k in k-NN regression"],"prefix":"10.1007","volume":"114","author":[{"given":"Yaroslav","family":"Averyanov","sequence":"first","affiliation":[]},{"given":"Alain","family":"Celisse","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2025,3,10]]},"reference":[{"key":"6645_CR1","doi-asserted-by":"crossref","first-page":"716","DOI":"10.1109\/TAC.1974.1100705","volume":"19.6","author":"H Akaike","year":"1974","unstructured":"Akaike, H. (1974). A new look at the statistical model identification. 
IEEE Transactions on Automatic Control, 19.6, 716\u2013723.","journal-title":"IEEE Transactions on Automatic Control"},{"key":"6645_CR2","series-title":"Selected papers of Hirotugu Akaike","volume-title":"Information theory and an extension of the maximum likelihood principle","author":"H Akaike","year":"1998","unstructured":"Akaike, H. (1998). Information theory and an extension of the maximum likelihood principle. Selected papers of Hirotugu AkaikeSpringer."},{"key":"6645_CR3","unstructured":"Arlot, S., Bach, F. (2009). Data-driven calibration of linear estimators with minimal penalties. In: arXiv e-prints, arXiv:0909.1884 (Sept), arXiv:0909.1884. arXiv: [math.ST]."},{"key":"6645_CR4","first-page":"46","volume":"22","author":"Sylvain Arlot","year":"2009","unstructured":"Arlot, Sylvain, & Bach, Francis R. (2009). Data-driven calibration of linear estimators with minimal penalties. Advances in Neural Information Processing Systems, 22, 46\u201354.","journal-title":"Advances in Neural Information Processing Systems"},{"key":"6645_CR5","doi-asserted-by":"crossref","first-page":"40","DOI":"10.1214\/09-SS054","volume":"4","author":"S Arlot","year":"2010","unstructured":"Arlot, S., Celisse, A., et al. (2010). A survey of cross-validation procedures for model selection. Statistics Surveys, 4, 40\u201379.","journal-title":"Statistics Surveys"},{"issue":"2","key":"6645_CR6","first-page":"245","volume":"10","author":"S Arlot","year":"2009","unstructured":"Arlot, S., & Massart, P. (2009). Data-driven calibration of penalties for least-squares regression. Journal of Machine learning research, 10(2), 245.","journal-title":"Journal of Machine learning research"},{"key":"6645_CR7","unstructured":"Azadkia, M. (2019). Optimal choice of$$k$$for$$k$$-nearest neighbor regression. 
arXiv: https:\/\/arxiv.org\/abs\/1909.05495 [math.ST]."},{"issue":"3","key":"6645_CR8","first-page":"1092","volume":"50","author":"Y Baraud","year":"2014","unstructured":"Baraud, Y., Giraud, C., & Huet, S. (2014). Estimator selection in the Gaussian setting. Annales de l\u2019IHP Probabilit\u00e9s et statistiques., 50(3), 1092\u20131119.","journal-title":"Annales de l\u2019IHP Probabilit\u00e9s et statistiques."},{"issue":"1","key":"6645_CR9","doi-asserted-by":"crossref","first-page":"52","DOI":"10.1016\/j.jco.2006.07.001","volume":"23","author":"F Bauer","year":"2007","unstructured":"Bauer, F., Pereverzev, S., & Rosasco, L. (2007). On regularization algorithms in learning theory. Journal of complexity, 23(1), 52\u201372.","journal-title":"Journal of complexity"},{"issue":"32","key":"6645_CR10","doi-asserted-by":"crossref","first-page":"15849","DOI":"10.1073\/pnas.1903070116","volume":"116","author":"M Belkin","year":"2019","unstructured":"Belkin, M., et al. (2019). Reconciling modern machine-learning practice and the classical bias-variance trade-off. Proceedings of the National Academy of Sciences, 116(32), 15849\u201315854.","journal-title":"Proceedings of the National Academy of Sciences"},{"issue":"4","key":"6645_CR11","doi-asserted-by":"crossref","first-page":"1167","DOI":"10.1137\/20M1336072","volume":"2","author":"M Belkin","year":"2020","unstructured":"Belkin, M., Hsu, D., & Ji, X. (2020). Two models of double descent for weak features. SIAM Journal on Mathematics of Data Science, 2(4), 1167\u20131180.","journal-title":"SIAM Journal on Mathematics of Data Science"},{"issue":"9","key":"6645_CR12","doi-asserted-by":"crossref","first-page":"509","DOI":"10.1145\/361002.361007","volume":"18","author":"JL Bentley","year":"1975","unstructured":"Bentley, J. L. (1975). Multidimensional binary search trees used for associative searching. 
Communications of the ACM, 18(9), 509\u2013517.","journal-title":"Communications of the ACM"},{"key":"6645_CR13","doi-asserted-by":"crossref","unstructured":"Bertin, K., Lacour, C., Rivoirard, V. (2016). Adaptive pointwise estimation of conditional density function.","DOI":"10.1214\/14-AIHP665"},{"key":"6645_CR14","doi-asserted-by":"crossref","first-page":"976","DOI":"10.1214\/aos\/1176350487","volume":"15","author":"PK Bhattacharya","year":"1987","unstructured":"Bhattacharya, P. K., & Mack, Y. P. (1987). Weak convergence of k-NN density and regression estimators with varying k and applications. The Annals of Statistics, 15, 976\u2013994.","journal-title":"The Annals of Statistics"},{"issue":"4","key":"6645_CR15","doi-asserted-by":"crossref","first-page":"2034","DOI":"10.1109\/TIT.2010.2040857","volume":"56","author":"G Biau","year":"2010","unstructured":"Biau, G., C\u00e9rou, F., & Guyader, A. (2010). Rates of convergence of the functional $$k$$-nearest neighbor estimate. IEEE Transactions on Information Theory, 56(4), 2034\u20132040.","journal-title":"IEEE Transactions on Information Theory"},{"key":"6645_CR16","doi-asserted-by":"crossref","DOI":"10.1007\/978-3-319-25388-6","volume-title":"Lectures on the nearest neighbor method","author":"G\u00e9rard Biau","year":"2015","unstructured":"Biau, G\u00e9rard., & Devroye, Luc. (2015). Lectures on the nearest neighbor method. Springer."},{"key":"6645_CR17","doi-asserted-by":"crossref","unstructured":"Birg\u00e9, L. (2001). An alternative point of view on Lepski\u2019s method\u201d. In: Lecture Notes-Monograph Series, pp.113\u2013133.","DOI":"10.1214\/lnms\/1215090065"},{"issue":"1\u20132","key":"6645_CR18","doi-asserted-by":"crossref","first-page":"33","DOI":"10.1007\/s00440-006-0011-8","volume":"138","author":"L Birg\u00e9","year":"2007","unstructured":"Birg\u00e9, L., & Massart, P. (2007). Minimal penalties for Gaussian model selection. 
Probability Theory and Related Fields, 138(1\u20132), 33\u201373.","journal-title":"Probability Theory and Related Fields"},{"issue":"3","key":"6645_CR19","doi-asserted-by":"crossref","first-page":"1043","DOI":"10.1137\/17M1154096","volume":"6","author":"G Blanchard","year":"2018","unstructured":"Blanchard, G., Hoffmann, M., & Rei\u00df, M. (2018). Optimal adaptation for early stopping in statistical inverse problems. SIAM\/ASA Journal on Uncertainty Quantification, 6(3), 1043\u20131075.","journal-title":"SIAM\/ASA Journal on Uncertainty Quantification"},{"issue":"2","key":"6645_CR20","first-page":"3204","volume":"12","author":"G Blanchard","year":"2018","unstructured":"Blanchard, G., Hoffmann, M., Rei\u00df, M., et al. (2018). Early stopping for statistical inverse problems via truncated SVD estimation. Electronic Journal of Statistics, 12(2), 3204\u20133231.","journal-title":"Electronic Journal of Statistics"},{"issue":"4","key":"6645_CR21","first-page":"398","volume":"15","author":"Y Cao","year":"2006","unstructured":"Cao, Y., & Golubev, Y. (2006). On oracle inequalities related to smoothing splines In: Mathematical Methods of Statistics, 15(4), 398\u2013414.","journal-title":"Mathematical Methods of Statistics"},{"issue":"1","key":"6645_CR22","first-page":"2373","volume":"19","author":"A Celisse","year":"2018","unstructured":"Celisse, A., & Mary-Huard, T. (2018). Theoretical analysis of cross-validation for estimating the risk of the k-nearest neighbor classifier. The Journal of Machine Learning Research, 19(1), 2373\u20132426.","journal-title":"The Journal of Machine Learning Research"},{"issue":"76","key":"6645_CR23","first-page":"1","volume":"22","author":"A Celisse","year":"2021","unstructured":"Celisse, A., & Wahl, M. (2021). Analyzing the discrepancy principle for kernelized spectral filter learning algorithms. 
Journal of Machine Learning Research, 22(76), 1\u201359.","journal-title":"Journal of Machine Learning Research"},{"key":"6645_CR24","doi-asserted-by":"crossref","unstructured":"Collomb, G et al. (1979). Estimation de la regression par la m\u00e9thode des k points les plus proches: propri\u00e9t\u00e9s de convergence ponctuelle.","DOI":"10.1007\/BFb0097428"},{"issue":"4","key":"6645_CR25","doi-asserted-by":"crossref","first-page":"377","DOI":"10.1007\/BF01404567","volume":"31","author":"P Craven","year":"1978","unstructured":"Craven, P., & Wahba, G. (1978). Smoothing noisy data with spline functions. Numerische Mathematik, 31(4), 377\u2013403.","journal-title":"Numerische Mathematik"},{"issue":"2","key":"6645_CR26","doi-asserted-by":"crossref","first-page":"142","DOI":"10.1109\/TIT.1978.1055865","volume":"24","author":"L Devroye","year":"1978","unstructured":"Devroye, L. (1978). The uniform convergence of nearest neighbor regression function estimators and their application in optimization. IEEE Transactions on Information Theory, 24(2), 142\u2013151.","journal-title":"IEEE Transactions on Information Theory"},{"issue":"6","key":"6645_CR27","first-page":"1310","volume":"9","author":"L Devroye","year":"1981","unstructured":"Devroye, L., et al. (1981). On the almost everywhere convergence of nonparametric regression function estimates. The Annals of Statistics, 9(6), 1310\u20131319.","journal-title":"The Annals of Statistics"},{"key":"6645_CR28","unstructured":"Dua, D., Graff, C. (2017). UCI Machine Learning Repository. http:\/\/archive.ics.uci.edu\/ml"},{"key":"6645_CR29","unstructured":"Dwivedi, R. et al. (2020). Revisiting complexity and the bias-variance tradeoff. In: arXiv preprint[SPACE]arXiv:2006.10189."},{"key":"6645_CR30","doi-asserted-by":"crossref","DOI":"10.1007\/978-94-009-1740-8","volume-title":"Regularization of inverse problems","author":"HW Engl","year":"1996","unstructured":"Engl, H. W., Hanke, M., & Neubauer, A. (1996). 
Regularization of inverse problems (Vol. 375). Springer Science & Business Media."},{"issue":"350","key":"6645_CR31","doi-asserted-by":"crossref","first-page":"320","DOI":"10.1080\/01621459.1975.10479865","volume":"70","author":"S Geisser","year":"1975","unstructured":"Geisser, S. (1975). The predictive sample reuse method with applications. Journal of the American statistical Association, 70(350), 320\u2013328.","journal-title":"Journal of the American statistical Association"},{"issue":"7","key":"6645_CR32","doi-asserted-by":"crossref","first-page":"1873","DOI":"10.1162\/neco.2008.05-07-517","volume":"20","author":"LL Gerfo","year":"2008","unstructured":"Gerfo, L. L., et al. (2008). Spectral algorithms for supervised learning. Neural Computation, 20(7), 1873\u20131897.","journal-title":"Neural Computation"},{"issue":"2","key":"6645_CR33","doi-asserted-by":"crossref","first-page":"209","DOI":"10.1137\/S0040585X97985923","volume":"57","author":"AV Goldenshluger","year":"2013","unstructured":"Goldenshluger, A. V., & Lepski, O. V. (2013). General selection rule from a family of linear estimators. Theory of Probability & Its Applications, 57(2), 209\u2013226.","journal-title":"Theory of Probability & Its Applications"},{"key":"6645_CR34","volume-title":"A distribution-free theory of nonparametric regression","author":"L Gy\u00f6rfi","year":"2006","unstructured":"Gy\u00f6rfi, L., et al. (2006). A distribution-free theory of nonparametric regression. Springer Science & Business Media."},{"key":"6645_CR35","doi-asserted-by":"crossref","DOI":"10.1007\/978-0-387-84858-7","volume-title":"The elements of statistical learning: Data mining, inference, and prediction","author":"T Hastie","year":"2009","unstructured":"Hastie, T., Tibshirani, R., & Friedman, J. (2009). The elements of statistical learning: Data mining, inference, and prediction. 
Springer Science & Business Media."},{"issue":"3","key":"6645_CR36","doi-asserted-by":"crossref","first-page":"291","DOI":"10.1016\/S0167-7152(96)00140-X","volume":"33","author":"R Kelley Pace","year":"1997","unstructured":"Kelley Pace, R., & Barry, R. (1997). Sparse spatial autoregressions. Statistics & Probability Letters, 33(3), 291\u2013297.","journal-title":"Statistics & Probability Letters"},{"key":"6645_CR37","first-page":"729","volume":"24","author":"S Kpotufe","year":"2011","unstructured":"Kpotufe, S. (2011). k-NN regression adapts to local intrinsic dimension. Advances in neural information processing systems., 24, 729\u2013737.","journal-title":"Advances in neural information processing systems."},{"issue":"11","key":"6645_CR38","doi-asserted-by":"crossref","first-page":"1877","DOI":"10.1002\/cpa.21504","volume":"67","author":"F Krahmer","year":"2014","unstructured":"Krahmer, F., Mendelson, S., & Rauhut, H. (2014). Suprema of chaos processes and the restricted isometry property. Communications on Pure and Applied Mathematics, 67(11), 1877\u20131904.","journal-title":"Communications on Pure and Applied Mathematics"},{"key":"6645_CR39","doi-asserted-by":"crossref","first-page":"87","DOI":"10.1090\/advsov\/012\/04","volume":"12","author":"OV Lepski","year":"1992","unstructured":"Lepski, O. V. (1992). On problems of adaptive estimation in white Gaussian noise. Topics in Nonparametric Estimation, 12, 87\u2013106.","journal-title":"Topics in Nonparametric Estimation"},{"issue":"4","key":"6645_CR40","doi-asserted-by":"crossref","first-page":"682","DOI":"10.1137\/1136085","volume":"36","author":"OV Lepskii","year":"1992","unstructured":"Lepskii, O. V. (1992). Asymptotically minimax adaptive estimation. I: Upper bounds. optimally adaptive estimates. 
Theory of Probability & its Applications, 36(4), 682\u2013697.","journal-title":"Theory of Probability & its Applications"},{"issue":"3","key":"6645_CR41","doi-asserted-by":"crossref","first-page":"433","DOI":"10.1137\/1137095","volume":"37","author":"OV Lepskii","year":"1993","unstructured":"Lepskii, O. V. (1993). Asymptotically minimax adaptive estimation. II. Schemes without optimal adaptation: Adaptive estimators. Theory of Probability & its Applications, 37(3), 433\u2013448.","journal-title":"Theory of Probability & its Applications"},{"key":"6645_CR42","first-page":"929","volume":"25","author":"OV Lepski","year":"1997","unstructured":"Lepski, O. V., Mammen, E., & Spokoiny, V. G. (1997). Optimal spatial adaptation to inhomogeneous smoothness: An approach based on kernel estimates with variable bandwidth selectors. The Annals of Statistics, 25, 929\u2013947.","journal-title":"The Annals of Statistics"},{"key":"6645_CR43","first-page":"958","volume":"15","author":"K-C Li","year":"1987","unstructured":"Li, K.-C. (1987). Asymptotic optimality for Cp, CL, cross-validation and generalized cross-validation: Discrete index set. The Annals of Statistics, 15, 958\u2013975.","journal-title":"The Annals of Statistics"},{"issue":"1","key":"6645_CR44","first-page":"87","volume":"42","author":"CL Mallows","year":"2000","unstructured":"Mallows, C. L. (2000). Some comments on Cp. Technometrics, 42(1), 87\u201394.","journal-title":"Technometrics"},{"issue":"1","key":"6645_CR45","doi-asserted-by":"crossref","first-page":"141","DOI":"10.1137\/1109020","volume":"9","author":"EA Nadaraya","year":"1964","unstructured":"Nadaraya, E. A. (1964). On estimating regression. Theory of Probability & its Applications, 9(1), 141\u2013142.","journal-title":"Theory of Probability & its Applications"},{"key":"6645_CR46","volume-title":"Five balltree construction algorithms","author":"SM Omohundro","year":"1989","unstructured":"Omohundro, S. M. (1989). Five balltree construction algorithms. 
International Computer Science Institute Berkeley."},{"issue":"1","key":"6645_CR47","first-page":"335","volume":"15","author":"G Raskutti","year":"2014","unstructured":"Raskutti, G., Wainwright, M. J., & Yu, B. (2014). Early stopping and non-parametric regression: An optimal data-dependent stopping rule. Journal of Machine Learning Research, 15(1), 335\u2013366.","journal-title":"Journal of Machine Learning Research"},{"issue":"4","key":"6645_CR48","doi-asserted-by":"crossref","first-page":"1215","DOI":"10.1214\/aos\/1176346788","volume":"12","author":"J Rice","year":"1984","unstructured":"Rice, J., et al. (1984). Bandwidth choice for nonparametric regression. The Annals of Statistics, 12(4), 1215\u20131230.","journal-title":"The Annals of Statistics"},{"key":"6645_CR49","first-page":"1","volume":"18","author":"Mark Rudelson","year":"2013","unstructured":"Rudelson, Mark, Vershynin, Roman, et al. (2013). Hanson-Wright inequality and sub-gaussian concentration. Electronic Communications in Probability, 18, 1.","journal-title":"Electronic Communications in Probability"},{"issue":"2","key":"6645_CR50","doi-asserted-by":"crossref","first-page":"461","DOI":"10.1214\/aos\/1176344136","volume":"6","author":"G Schwarz","year":"1978","unstructured":"Schwarz, G., et al. (1978). Estimating the dimension of a model. The Annals of Statistics, 6(2), 461\u2013464.","journal-title":"The Annals of Statistics"},{"key":"6645_CR51","volume-title":"Introduction to nonparametric estimation","author":"AB Tsybakov","year":"2008","unstructured":"Tsybakov, A. B. (2008). Introduction to nonparametric estimation. New York: Springer Science & Business Media."},{"key":"6645_CR52","volume-title":"High-dimensional statistics: A non-asymptotic viewpoint","author":"MJ Wainwright","year":"2019","unstructured":"Wainwright, M. J. (2019). High-dimensional statistics: A non-asymptotic viewpoint. 
Cambridge University Press."},{"key":"6645_CR53","volume-title":"All of nonparametric statistics","author":"L Wasserman","year":"2006","unstructured":"Wasserman, L. (2006). All of nonparametric statistics. Springer Science & Business Media."},{"issue":"1","key":"6645_CR54","doi-asserted-by":"crossref","first-page":"252","DOI":"10.1214\/aos\/1046294464","volume":"31","author":"M Wegkamp","year":"2003","unstructured":"Wegkamp, M., et al. (2003). Model selection in nonparametric regression. The Annals of Statistics, 31(1), 252\u2013273.","journal-title":"The Annals of Statistics"},{"key":"6645_CR55","first-page":"6067","volume":"30","author":"Y Wei","year":"2017","unstructured":"Wei, Y., Yang, F., & Wainwright, M. J. (2017). Early stopping for kernel boosting algorithms: A general analysis with localized complexities. Advances in Neural Information Processing Systems, 30, 6067\u20136077.","journal-title":"Advances in Neural Information Processing Systems"},{"key":"6645_CR56","first-page":"475","volume":"9","author":"Y Yang","year":"1999","unstructured":"Yang, Y. (1999). Model selection for nonparametric regression. Statistica Sinica, 9, 475\u2013499.","journal-title":"Statistica Sinica"},{"issue":"2","key":"6645_CR57","doi-asserted-by":"publisher","first-page":"289","DOI":"10.1007\/s00365-006-0663-2","volume":"26","author":"Y Yao","year":"2007","unstructured":"Yao, Y., Rosasco, L., & Caponnetto, A. (2007). On Early Stopping in Gradient Descent Learning. Constructive Approximation, 26(2), 289\u2013315. https:\/\/doi.org\/10.1007\/s00365-006-0663-2","journal-title":"Constructive Approximation"},{"issue":"4","key":"6645_CR58","doi-asserted-by":"crossref","first-page":"1538","DOI":"10.1214\/009053605000000255","volume":"33","author":"T Zhang","year":"2005","unstructured":"Zhang, T., Bin, Y., et al. (2005). Boosting with early stopping: Convergence and consistency. 
The Annals of Statistics, 33(4), 1538\u20131579.","journal-title":"The Annals of Statistics"},{"key":"6645_CR59","doi-asserted-by":"crossref","unstructured":"Zhao, P., & Lai, L. (2019). In: 2019 IEEE International Symposium on Information Theory (ISIT). Minimax regression via adaptive nearest neighbor\u20192019, pp. 1447\u20131451.","DOI":"10.1109\/ISIT.2019.8849669"},{"issue":"12","key":"6645_CR60","doi-asserted-by":"crossref","first-page":"11007","DOI":"10.1609\/aaai.v35i12.17314","volume":"35","author":"P Zhao","year":"2021","unstructured":"Zhao, P., & Lai, L. (2021). Efficient classification with adaptive KNN. Proceedings of the AAAI Conference on Artificial Intelligence, 35(12), 11007\u201311014.","journal-title":"Proceedings of the AAAI Conference on Artificial Intelligence"},{"issue":"5","key":"6645_CR61","doi-asserted-by":"crossref","first-page":"3155","DOI":"10.1109\/TIT.2021.3062078","volume":"67","author":"P Zhao","year":"2021","unstructured":"Zhao, P., & Lai, L. (2021). Minimax rate optimal adaptive nearest neighbor classification and regression. 
IEEE Transactions on Information Theory, 67(5), 3155.","journal-title":"IEEE Transactions on Information Theory"}],"container-title":["Machine Learning"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s10994-024-06645-5.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s10994-024-06645-5","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s10994-024-06645-5.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,3,10]],"date-time":"2026-03-10T01:02:03Z","timestamp":1773104523000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s10994-024-06645-5"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,3,10]]},"references-count":61,"journal-issue":{"issue":"5","published-print":{"date-parts":[[2025,5]]}},"alternative-id":["6645"],"URL":"https:\/\/doi.org\/10.1007\/s10994-024-06645-5","relation":{},"ISSN":["0885-6125","1573-0565"],"issn-type":[{"value":"0885-6125","type":"print"},{"value":"1573-0565","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025,3,10]]},"assertion":[{"value":"23 April 2021","order":1,"name":"received","label":"Received","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"1 March 2024","order":2,"name":"revised","label":"Revised","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"4 October 2024","order":3,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"10 March 2025","order":4,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article 
History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"The authors declare that they have no Conflict of interest.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflict of interest"}},{"value":"Not applicable.","order":3,"name":"Ethics","group":{"name":"EthicsHeading","label":"Ethical approval"}},{"value":"Not applicable.","order":4,"name":"Ethics","group":{"name":"EthicsHeading","label":"Consent for publication"}},{"value":"This content has been made available to all.","name":"free","label":"Free to read"}],"article-number":"118"}}