{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,12,10]],"date-time":"2025-12-10T09:07:41Z","timestamp":1765357661480,"version":"3.40.3"},"reference-count":80,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"4","license":[{"start":{"date-parts":[[2025,4,1]],"date-time":"2025-04-01T00:00:00Z","timestamp":1743465600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2025,4,1]],"date-time":"2025-04-01T00:00:00Z","timestamp":1743465600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,4,1]],"date-time":"2025-04-01T00:00:00Z","timestamp":1743465600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100003453","name":"Natural Science Foundation of Guangdong Province","doi-asserted-by":"publisher","award":["2024A1515010089","2022A1515010179","2023A1515011281"],"award-info":[{"award-number":["2024A1515010089","2022A1515010179","2023A1515011281"]}],"id":[{"id":"10.13039\/501100003453","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62272173","61571005"],"award-info":[{"award-number":["62272173","61571005"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Neural Netw. Learning Syst."],"published-print":{"date-parts":[[2025,4]]},"DOI":"10.1109\/tnnls.2024.3406635","type":"journal-article","created":{"date-parts":[[2024,8,15]],"date-time":"2024-08-15T17:40:36Z","timestamp":1723743636000},"page":"6723-6737","source":"Crossref","is-referenced-by-count":3,"title":["Neural Operator Variational Inference Based on Regularized Stein Discrepancy for Deep Gaussian Processes"],"prefix":"10.1109","volume":"36","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-0350-5528","authenticated-orcid":false,"given":"Jian","family":"Xu","sequence":"first","affiliation":[{"name":"School of Mathematics, South China University of Technology, Guangzhou, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9392-6846","authenticated-orcid":false,"given":"Shian","family":"Du","sequence":"additional","affiliation":[{"name":"Shenzhen International Graduate School, Tsinghua University, Shenzhen, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-9677-0768","authenticated-orcid":false,"given":"Junmei","family":"Yang","sequence":"additional","affiliation":[{"name":"School of Electronic and Information Engineering, South China University of Technology, Guangzhou, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-9356-2883","authenticated-orcid":false,"given":"Qianli","family":"Ma","sequence":"additional","affiliation":[{"name":"School of Computer Science and Engineering, South China University of Technology, Guangzhou, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-7322-1873","authenticated-orcid":false,"given":"Delu","family":"Zeng","sequence":"additional","affiliation":[{"name":"School of Electronic and Information Engineering, South China University of Technology, Guangzhou, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.7551\/mitpress\/3206.001.0001"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1109\/ICRA46639.2022.9812405"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2014.2319052"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/TASE.2019.2917887"},{"key":"ref5","first-page":"9443","article-title":"Deep neural networks as point estimates for deep Gaussian processes","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"34","author":"Dutordoir"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1109\/CEC.2018.8477946"},{"key":"ref7","first-page":"613","article-title":"Interpretable deep Gaussian processes with moments","volume-title":"Proc. Int. Conf. Artif. Intell. Statist.","author":"Lu"},{"key":"ref8","first-page":"8248","article-title":"Global inducing point variational posteriors for Bayesian neural networks and deep Gaussian processes","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Ober"},{"issue":"54","key":"ref9","first-page":"1","article-title":"How deep are deep Gaussian processes?","volume":"19","author":"Dunlop","year":"2018","journal-title":"J. Mach. Learn. Res."},{"key":"ref10","first-page":"1257","article-title":"Sparse Gaussian processes using pseudo-inputs","volume-title":"Proc. Conf. Neural Inf. Process. Syst.","author":"Snelson"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.5555\/1046920.1194909"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1109\/72.883477"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2020.3008496"},{"key":"ref14","first-page":"351","article-title":"Scalable variational Gaussian process classification","volume-title":"Proc. 18th Int. Conf. Artif. Intell. Statist.","volume":"38","author":"Hensman"},{"key":"ref15","first-page":"1481","article-title":"Distributed Gaussian processes","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Deisenroth"},{"key":"ref16","first-page":"3257","article-title":"Distributed variational inference in sparse Gaussian process regression and latent variable models","volume-title":"Proc. Conf. Neural Inf. Process. Syst.","author":"Gal"},{"key":"ref17","first-page":"282","article-title":"Gaussian processes for big data","volume-title":"Proc. 29th Conf. Uncertainty Artif. Intell.","author":"Hensman"},{"key":"ref18","first-page":"569","article-title":"A unifying framework of anytime sparse Gaussian process regression models with stochastic variational inference for big data","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Hoang"},{"key":"ref19","first-page":"382","article-title":"A distributed variational inference framework for unifying parallel sparse Gaussian process regression models","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Hoang"},{"article-title":"Variational model selection for sparse Gaussian process regression","year":"2009","author":"Titsias","key":"ref20"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2019.2957109"},{"key":"ref22","first-page":"4588","article-title":"Doubly stochastic variational inference for deep Gaussian processes","volume-title":"Proc. Conf. Neural Inf. Process. Syst.","author":"Salimbeni"},{"key":"ref23","first-page":"7517","article-title":"Inference in deep Gaussian processes using stochastic gradient Hamiltonian Monte Carlo","volume-title":"Proc. Conf. Neural Inf. Process. Syst.","author":"Havasi"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1287\/opre.2021.2162"},{"key":"ref25","first-page":"2926","article-title":"Hierarchical inducing point Gaussian process for inter-domain observations","volume-title":"Proc. Int. Conf. Artif. Intell. Statist.","author":"Wu"},{"key":"ref26","first-page":"8498","article-title":"Beyond the mean-field: Structured deep Gaussian processes improve the predictive uncertainties","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"33","author":"Lindinger"},{"key":"ref27","first-page":"1932","article-title":"Sparse orthogonal variational inference for Gaussian processes","volume-title":"Proc. Int. Conf. Artif. Intell. Statist.","author":"Shi"},{"key":"ref28","first-page":"1530","article-title":"Variational inference with normalizing flows","volume-title":"Proc. Int. Conf. Mach. Learn. (ICML)","volume":"37","author":"Rezende"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1109\/IJCNN52387.2021.9533613"},{"key":"ref30","article-title":"Density estimation using real NVP","author":"Dinh","year":"2016","journal-title":"arXiv:1605.08803"},{"key":"ref31","first-page":"2391","article-title":"Adversarial variational Bayes: Unifying variational autoencoders and generative adversarial networks","volume-title":"Proc. Int. Conf. Mach. Learn. (PMLR)","author":"Mescheder"},{"key":"ref32","first-page":"4222","article-title":"Variational implicit processes","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Ma"},{"key":"ref33","first-page":"1","article-title":"Implicit posterior variational inference for deep Gaussian processes","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"32","author":"Yu"},{"key":"ref34","article-title":"Functional variational Bayesian neural networks","author":"Sun","year":"2019","journal-title":"arXiv:1903.05779"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1016\/j.neucom.2020.09.076"},{"key":"ref36","first-page":"18723","article-title":"Function-space inference with sparse implicit processes","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Rodriguez-Santana"},{"key":"ref37","doi-asserted-by":"publisher","DOI":"10.1017\/CBO9781139035613"},{"key":"ref38","first-page":"167","article-title":"Unbiased implicit variational inference","volume-title":"Proc. Conf. Artif. Intell. Statist.","author":"Titsias"},{"key":"ref39","first-page":"1","article-title":"Stein variational gradient descent: A general purpose Bayesian inference algorithm","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"29","author":"Liu"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2020.3045082"},{"volume-title":"Deep Learning","year":"2016","author":"Goodfellow","key":"ref41"},{"key":"ref42","article-title":"Practical bounds on the error of Bayesian posterior approximations: A nonasymptotic approach","author":"Huggins","year":"2018","journal-title":"arXiv:1809.09505"},{"key":"ref43","first-page":"567","article-title":"Variational learning of inducing variables in sparse Gaussian processes","volume-title":"Proc. 12th Int. Conf. Artif. Intell. Statist.","author":"Titsias"},{"key":"ref44","first-page":"1303","article-title":"Stochastic variational inference","volume":"14","author":"Hoffman","year":"2013","journal-title":"J. Mach. Learn. Res."},{"key":"ref45","first-page":"207","article-title":"Deep Gaussian processes","volume-title":"Proc. Conf. Artif. Intell. Statist.","author":"Damianou"},{"key":"ref46","doi-asserted-by":"publisher","DOI":"10.7551\/mitpress\/1100.001.0001"},{"key":"ref47","first-page":"1","article-title":"Auto-encoding variational Bayes","volume-title":"Proc. Int. Conf. Learn. Represent.","author":"Kingma"},{"key":"ref48","first-page":"1","article-title":"Operator variational inference","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"29","author":"Ranganath"},{"key":"ref49","first-page":"3732","article-title":"Learning the Stein discrepancy for training and evaluating energy-based models without sampling","volume-title":"Proc. Int. Conf. Mach. Learn. (PMLR)","author":"Grathwohl"},{"key":"ref50","article-title":"Variational inference using implicit distributions","author":"Husz\u00e1r","year":"2017","journal-title":"arXiv:1702.08235"},{"key":"ref51","doi-asserted-by":"publisher","DOI":"10.1007\/BF02551274"},{"key":"ref52","first-page":"3094","article-title":"A universal approximation theorem of deep neural networks for expressing probability distributions","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"33","author":"Lu"},{"key":"ref53","first-page":"7610","article-title":"Constructive universal high-dimensional distribution generation through deep ReLU networks","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Perekrestenko"},{"key":"ref54","doi-asserted-by":"publisher","DOI":"10.1016\/j.neunet.2021.10.012"},{"key":"ref55","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2020.3028042"},{"key":"ref56","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2021.3095150"},{"key":"ref57","doi-asserted-by":"publisher","DOI":"10.1080\/03610918908812806"},{"key":"ref58","first-page":"1278","article-title":"Stochastic backpropagation and approximate inference in deep generative models","volume-title":"Proc. Int. Conf. Mach. Learn. (ICML)","author":"Rezende"},{"key":"ref59","first-page":"2575","article-title":"Variational dropout and the local reparameterization trick","volume-title":"Proc. Adv. Neural Inf. Process. Syst. (NIPS)","author":"Blum"},{"issue":"57","key":"ref60","first-page":"1","article-title":"Density estimation in infinite dimensional exponential families","volume":"18","author":"Sriperumbudur","year":"2017","journal-title":"J. Mach. Learn. Res."},{"key":"ref61","first-page":"6840","article-title":"Denoising diffusion probabilistic models","volume-title":"Proc. NIPS","volume":"33","author":"Ho"},{"key":"ref62","doi-asserted-by":"publisher","DOI":"10.1023\/b:jmiv.0000011321.19549.88"},{"key":"ref63","doi-asserted-by":"publisher","DOI":"10.1214\/aos\/1176343842"},{"key":"ref64","doi-asserted-by":"publisher","DOI":"10.1137\/1118101"},{"key":"ref65","doi-asserted-by":"publisher","DOI":"10.1214\/ECP.v18-2578"},{"key":"ref66","article-title":"FFJORD: Free-form continuous dynamics for scalable reversible generative models","author":"Grathwohl","year":"2018","journal-title":"arXiv:1810.01367"},{"key":"ref67","article-title":"The shape of data: Intrinsic distance for data distributions","author":"Tsitsulin","year":"2019","journal-title":"arXiv:1905.11141"},{"key":"ref68","doi-asserted-by":"publisher","DOI":"10.1137\/16M1078148"},{"key":"ref69","article-title":"Deep Gaussian processes with convolutional kernels","author":"Kumar","year":"2018","journal-title":"arXiv:1806.01655"},{"volume-title":"MNIST Handwritten Digit Database","year":"1998","author":"LeCun","key":"ref70"},{"key":"ref71","article-title":"Fashion-MNIST: A novel image dataset for benchmarking machine learning algorithms","author":"Xiao","year":"2017","journal-title":"arXiv:1708.07747"},{"article-title":"Learning multiple layers of features from tiny images","year":"2009","author":"Krizhevsky","key":"ref72"},{"key":"ref73","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.90"},{"key":"ref74","first-page":"1","article-title":"Stochastic variational deep kernel learning","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"29","author":"Wilson"},{"key":"ref75","first-page":"1529","article-title":"Bayesian image classification with deep convolutional Gaussian processes","volume-title":"Proc. Int. Conf. Artif. Intell. Statist.","author":"Dutordoir"},{"key":"ref76","first-page":"582","article-title":"Deep convolutional Gaussian processes","volume-title":"Proc. Joint Eur. Conf. Mach. Learn. Knowl. Discovery Databases","author":"Blomqvist"},{"key":"ref77","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-05318-5_3"},{"key":"ref78","first-page":"5589","article-title":"Deep Gaussian processes with importance-weighted variational inference","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Salimbeni"},{"key":"ref79","first-page":"9148","article-title":"On signal-to-noise ratio issues in variational inference for deep Gaussian processes","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Rudner"},{"key":"ref80","article-title":"Adam: A method for stochastic optimization","author":"Kingma","year":"2014","journal-title":"arXiv:1412.6980"}],"container-title":["IEEE Transactions on Neural Networks and Learning Systems"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/5962385\/10949581\/10637293.pdf?arnumber=10637293","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,4,7]],"date-time":"2025-04-07T21:55:56Z","timestamp":1744062956000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10637293\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,4]]},"references-count":80,"journal-issue":{"issue":"4"},"URL":"https:\/\/doi.org\/10.1109\/tnnls.2024.3406635","relation":{},"ISSN":["2162-237X","2162-2388"],"issn-type":[{"type":"print","value":"2162-237X"},{"type":"electronic","value":"2162-2388"}],"subject":[],"published":{"date-parts":[[2025,4]]}}}