{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2024,9,7]],"date-time":"2024-09-07T03:16:19Z","timestamp":1725678979362},"publisher-location":"Berlin, Heidelberg","reference-count":19,"publisher":"Springer Berlin Heidelberg","isbn-type":[{"type":"print","value":"9783642302169"},{"type":"electronic","value":"9783642302176"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2012]]},"DOI":"10.1007\/978-3-642-30217-6_24","type":"book-chapter","created":{"date-parts":[[2012,5,9]],"date-time":"2012-05-09T10:14:58Z","timestamp":1336558498000},"page":"282-293","source":"Crossref","is-referenced-by-count":3,"title":["Nystr\u00f6m Approximate Model Selection for LSSVM"],"prefix":"10.1007","author":[{"given":"Lizhong","family":"Ding","sequence":"first","affiliation":[]},{"given":"Shizhong","family":"Liao","sequence":"additional","affiliation":[]}],"member":"297","reference":[{"issue":"10","key":"24_CR1","doi-asserted-by":"publisher","first-page":"1467","DOI":"10.1016\/j.neunet.2004.07.002","volume":"17","author":"G.C. Cawley","year":"2004","unstructured":"Cawley, G.C., Talbot, N.L.C.: Fast exact leave-one-out cross-validation of sparse least-squares support vector machines. Neural Networks\u00a017(10), 1467\u20131475 (2004)","journal-title":"Neural Networks"},{"key":"24_CR2","first-page":"841","volume":"8","author":"G.C. Cawley","year":"2007","unstructured":"Cawley, G.C., Talbot, N.L.C.: Preventing over-fitting during model selection via Bayesian regularisation of the hyper-parameters. Journal of Machine Learning Research\u00a08, 841\u2013861 (2007)","journal-title":"Journal of Machine Learning Research"},{"key":"24_CR3","first-page":"2079","volume":"11","author":"G.C. Cawley","year":"2010","unstructured":"Cawley, G.C., Talbot, N.L.C.: On over-fitting in model selection and subsequent selection bias in performance evaluation. Journal of Machine Learning Research\u00a011, 2079\u20132107 (2010)","journal-title":"Journal of Machine Learning Research"},{"key":"24_CR4","first-page":"230","volume-title":"Advances in Neural Information Processing Systems","author":"O. Chapelle","year":"2000","unstructured":"Chapelle, O., Vapnik, V.: Model selection for support vector machines. In: Advances in Neural Information Processing Systems, vol.\u00a012, pp. 230\u2013236. MIT Press, Cambridge (2000)"},{"issue":"1","key":"24_CR5","doi-asserted-by":"publisher","first-page":"131","DOI":"10.1023\/A:1012450327387","volume":"46","author":"O. Chapelle","year":"2002","unstructured":"Chapelle, O., Vapnik, V., Bousquet, O., Mukherjee, S.: Choosing multiple parameters for support vector machines. Machine Learning\u00a046(1), 131\u2013159 (2002)","journal-title":"Machine Learning"},{"key":"24_CR6","unstructured":"Cortes, C., Mohri, M., Talwalkar, A.: On the impact of kernel approximation on learning accuracy. In: Proceedings of the 13th International Conference on Artificial Intelligence and Statistics (AISTATS), Sardinia, Italy, pp. 113\u2013120 (2010)"},{"key":"24_CR7","first-page":"1","volume":"7","author":"J. Dem\u0161ar","year":"2006","unstructured":"Dem\u0161ar, J.: Statistical comparisons of classifiers over multiple data sets. Journal of Machine Learning Research\u00a07, 1\u201330 (2006)","journal-title":"Journal of Machine Learning Research"},{"key":"24_CR8","first-page":"2153","volume":"6","author":"P. Drineas","year":"2005","unstructured":"Drineas, P., Mahoney, M.: On the Nystr\u00f6m method for approximating a Gram matrix for improved kernel-based learning. Journal of Machine Learning Research\u00a06, 2153\u20132175 (2005)","journal-title":"Journal of Machine Learning Research"},{"key":"24_CR9","doi-asserted-by":"publisher","first-page":"41","DOI":"10.1016\/S0925-2312(02)00601-X","volume":"51","author":"K. Duan","year":"2003","unstructured":"Duan, K., Keerthi, S., Poo, A.: Evaluation of simple performance measures for tuning SVM hyperparameters. Neurocomputing\u00a051, 41\u201359 (2003)","journal-title":"Neurocomputing"},{"key":"24_CR10","volume-title":"Matrix Computations","author":"G. Golub","year":"1996","unstructured":"Golub, G., Van Loan, C.: Matrix Computations. Johns Hopkins University Press, Baltimore (1996)"},{"key":"24_CR11","first-page":"61","volume":"11","author":"I. Guyon","year":"2010","unstructured":"Guyon, I., Saffari, A., Dror, G., Cawley, G.: Model selection: Beyond the Bayesian \/ frequentist divide. Journal of Machine Learning Research\u00a011, 61\u201387 (2010)","journal-title":"Journal of Machine Learning Research"},{"key":"24_CR12","doi-asserted-by":"publisher","DOI":"10.1137\/1.9780898718027","volume-title":"Accuracy and stability of numerical algorithms","author":"N. Higham","year":"2002","unstructured":"Higham, N.: Accuracy and stability of numerical algorithms. SIAM, Philadelphia (2002)"},{"key":"24_CR13","unstructured":"Kumar, S., Mohri, M., Talwalkar, A.: Sampling techniques for the Nystr\u00f6m method. In: Proceedings of the 12th International Conference on Artificial Intelligence and Statistics (AISTATS), Clearwater, Florida, USA, pp. 304\u2013311 (2009)"},{"key":"24_CR14","unstructured":"Luntz, A., Brailovsky, V.: On estimation of characters obtained in statistical procedure of recognition. Technicheskaya Kibernetica\u00a03 (1969) (in Russian)"},{"issue":"3","key":"24_CR15","doi-asserted-by":"publisher","first-page":"287","DOI":"10.1023\/A:1007618119488","volume":"42","author":"G. R\u00e4tsch","year":"2001","unstructured":"R\u00e4tsch, G., Onoda, T., M\u00fcller, K.: Soft margins for AdaBoost. Machine Learning\u00a042(3), 287\u2013320 (2001)","journal-title":"Machine Learning"},{"issue":"3","key":"24_CR16","doi-asserted-by":"publisher","first-page":"293","DOI":"10.1023\/A:1018628609742","volume":"9","author":"J. Suykens","year":"1999","unstructured":"Suykens, J., Vandewalle, J.: Least squares support vector machine classifiers. Neural Processing Letters\u00a09(3), 293\u2013300 (1999)","journal-title":"Neural Processing Letters"},{"issue":"9","key":"24_CR17","doi-asserted-by":"publisher","first-page":"2013","DOI":"10.1162\/089976600300015042","volume":"12","author":"V. Vapnik","year":"2000","unstructured":"Vapnik, V., Chapelle, O.: Bounds on error expectation for support vector machines. Neural Computation\u00a012(9), 2013\u20132036 (2000)","journal-title":"Neural Computation"},{"key":"24_CR18","volume-title":"Statistical Learning Theory","author":"V. Vapnik","year":"1998","unstructured":"Vapnik, V.: Statistical Learning Theory. John Wiley & Sons, New York (1998)"},{"key":"24_CR19","first-page":"682","volume-title":"Advances in Neural Information Processing Systems 13","author":"C. Williams","year":"2001","unstructured":"Williams, C., Seeger, M.: Using the Nystr\u00f6m method to speed up kernel machines. In: Advances in Neural Information Processing Systems 13, pp. 682\u2013688. MIT Press, Cambridge (2001)"}],"container-title":["Lecture Notes in Computer Science","Advances in Knowledge Discovery and Data Mining"],"original-title":[],"link":[{"URL":"http:\/\/link.springer.com\/content\/pdf\/10.1007\/978-3-642-30217-6_24.pdf","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2021,5,4]],"date-time":"2021-05-04T11:31:44Z","timestamp":1620127904000},"score":1,"resource":{"primary":{"URL":"http:\/\/link.springer.com\/10.1007\/978-3-642-30217-6_24"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2012]]},"ISBN":["9783642302169","9783642302176"],"references-count":19,"URL":"https:\/\/doi.org\/10.1007\/978-3-642-30217-6_24","relation":{},"ISSN":["0302-9743","1611-3349"],"issn-type":[{"type":"print","value":"0302-9743"},{"type":"electronic","value":"1611-3349"}],"subject":[],"published":{"date-parts":[[2012]]}}}