{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,1,17]],"date-time":"2026-01-17T16:02:49Z","timestamp":1768665769915,"version":"3.49.0"},"reference-count":42,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"1","license":[{"start":{"date-parts":[[2026,2,1]],"date-time":"2026-02-01T00:00:00Z","timestamp":1769904000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2026,2,1]],"date-time":"2026-02-01T00:00:00Z","timestamp":1769904000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2026,2,1]],"date-time":"2026-02-01T00:00:00Z","timestamp":1769904000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62173096"],"award-info":[{"award-number":["62173096"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Open Research Fund from Guangdong Laboratory of Artificial Intelligence and Digital Economy","award":["GML-KF-24-15"],"award-info":[{"award-number":["GML-KF-24-15"]}]},{"name":"Shenzhen Science and Technology Program","award":["SGDX20240115111759002"],"award-info":[{"award-number":["SGDX20240115111759002"]}]},{"name":"Meituan"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Big Data"],"published-print":{"date-parts":[[2026,2]]},"DOI":"10.1109\/tbdata.2025.3618472","type":"journal-article","created":{"date-parts":[[2025,10,7]],"date-time":"2025-10-07T17:50:39Z","timestamp":1759859439000},"page":"249-260","source":"Crossref","is-referenced-by-count":0,"title":["Two-Step Nystr\u00f6m Sampling for Large-Scale Kernel Approximation"],"prefix":"10.1109","volume":"12","author":[{"ORCID":"https:\/\/orcid.org\/0000-0003-0261-4068","authenticated-orcid":false,"given":"Li","family":"He","sequence":"first","affiliation":[{"name":"Guangdong Laboratory of Artificial Intelligence and Digital Economy (SZ), China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-1677-6132","authenticated-orcid":false,"given":"Hong","family":"Zhang","sequence":"additional","affiliation":[{"name":"Department of Electronic and Electrical Engineering, Southern University of Science and Technology, Shenzhen, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/TBDATA.2018.2797977"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i16.17664"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/TBDATA.2020.2982146"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/TCSVT.2023.3267895"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/TBDATA.2019.2907985"},{"issue":"1","key":"ref6","first-page":"981","article-title":"Sampling methods for the Nystr\u00f6m method","volume-title":"J. Mach. Learn. Res.","volume":"13","author":"Kumar","year":"2012"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2018.2796860"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1145\/1273496.1273555"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.23919\/ECC57647.2023.10178323"},{"key":"ref10","first-page":"682","article-title":"Using the Nystr\u00f6m method to speed up kernel machines","volume-title":"Proc. 14th Annu. Conf. Neural Inf. Process. Syst.","author":"Williams"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/34.868688"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2004.1262185"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/TBDATA.2019.2931532"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1073\/pnas.0810600105"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1109\/TNN.2010.2064786"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/TCYB.2014.2358564"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1137\/21M1424627"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1002\/nla.2519"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1137\/1.9781611974348.67"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1007\/s00500-016-2160-8"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/ijcnn48605.2020.9207089"},{"issue":"1","key":"ref22","first-page":"3977","article-title":"Revisiting the Nystrom method for improved large-scale machine learning","volume-title":"J. Mach. Learn. Res.","volume":"17","author":"Gittens","year":"2016"},{"key":"ref23","first-page":"2935","article-title":"Fast statistical leverage score approximation in kernel ridge regression","volume-title":"Proc. 24th Int. Conf. Artif. Intell. Statist.","author":"Chen"},{"issue":"141","key":"ref24","first-page":"1","article-title":"Optimal convergence rates for distributed Nystroem approximation","volume-title":"J. Mach. Learn. Res.","volume":"24","author":"Li","year":"2023"},{"key":"ref25","first-page":"3836","article-title":"Recursive sampling for the Nystr\u00f6m method","volume-title":"Proc. 31st Int. Conf. Neural Inf. Process. Syst.","author":"Musco"},{"key":"ref26","first-page":"5672","article-title":"On fast leverage score sampling and optimal learning","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Rudi"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP43922.2022.9747039"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1137\/23M1565139"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-540-73750-6_11"},{"key":"ref30","first-page":"12678","article-title":"Sampling-based Nystr\u00f6m approximation and kernel quadrature","volume-title":"Proc. 40th Int. Conf. Mach. Learn.","author":"Hayakawa"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/TBDATA.2020.3039036"},{"key":"ref32","first-page":"1060","article-title":"Ensemble Nystrom method","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Kumar"},{"key":"ref33","first-page":"1177","article-title":"Random features for large-scale kernel machines","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Rahimi"},{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.1109\/TCYB.2018.2794998"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1137\/S0895479896298506"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1016\/j.neucom.2017.02.011"},{"key":"ref37","doi-asserted-by":"publisher","DOI":"10.1109\/5.726791"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1109\/ICPR.2006.895"},{"key":"ref39","first-page":"631","article-title":"Making large-scale Nystr\u00f6m approximation possible","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Li"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1007\/s00454-011-9340-1"},{"key":"ref41","first-page":"1471","article-title":"Training and testing low-degree polynomial data mappings via linear SVM","volume-title":"J. Mach. Learn. Res.","volume":"11","author":"Chang","year":"2010"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2001.937655"}],"container-title":["IEEE Transactions on Big Data"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/6687317\/11357241\/11195194.pdf?arnumber=11195194","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,1,17]],"date-time":"2026-01-17T05:19:12Z","timestamp":1768627152000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11195194\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2026,2]]},"references-count":42,"journal-issue":{"issue":"1"},"URL":"https:\/\/doi.org\/10.1109\/tbdata.2025.3618472","relation":{},"ISSN":["2332-7790","2372-2096"],"issn-type":[{"value":"2332-7790","type":"electronic"},{"value":"2372-2096","type":"electronic"}],"subject":[],"published":{"date-parts":[[2026,2]]}}}