{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,3]],"date-time":"2026-03-03T08:15:38Z","timestamp":1772525738980,"version":"3.50.1"},"reference-count":83,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"6","license":[{"start":{"date-parts":[[2024,6,1]],"date-time":"2024-06-01T00:00:00Z","timestamp":1717200000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2024,6,1]],"date-time":"2024-06-01T00:00:00Z","timestamp":1717200000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,6,1]],"date-time":"2024-06-01T00:00:00Z","timestamp":1717200000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"name":"NSF CAREER","award":["2144147"],"award-info":[{"award-number":["2144147"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Pattern Anal. Mach. Intell."],"published-print":{"date-parts":[[2024,6]]},"DOI":"10.1109\/tpami.2024.3355289","type":"journal-article","created":{"date-parts":[[2024,1,17]],"date-time":"2024-01-17T18:25:40Z","timestamp":1705515940000},"page":"4246-4261","source":"Crossref","is-referenced-by-count":13,"title":["Federated Gaussian Process: Convergence, Automatic Personalization and Multi-Fidelity Modeling"],"prefix":"10.1109","volume":"46","author":[{"ORCID":"https:\/\/orcid.org\/0000-0001-9929-8895","authenticated-orcid":false,"given":"Xubo","family":"Yue","sequence":"first","affiliation":[{"name":"Mechanical and Industrial Engineering, Northeastern University, Boston, MA, USA"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-4546-324X","authenticated-orcid":false,"given":"Raed","family":"Kontar","sequence":"additional","affiliation":[{"name":"Industrial and Operations Engineering, University of Michigan, Ann Arbor, MI, USA"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2021.3127448"},{"key":"ref2","first-page":"383","article-title":"Backpropagation convergence via deterministic nonmonotone perturbed minimization","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Mangasarian"},{"key":"ref3","first-page":"1273","article-title":"Communication-efficient learning of deep networks from decentralized data","volume-title":"Proc. Int. Conf. Artif. Intell. Statist.","author":"McMahan"},{"key":"ref4","article-title":"CoCoA: A general framework for communication-efficient distributed optimization","volume":"18","author":"Smith","year":"2018","journal-title":"J. Mach. Learn. Res."},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1016\/j.ijmedinf.2018.01.007"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1109\/INFOCOM.2019.8737464"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/LCOMM.2019.2921755"},{"key":"ref8","article-title":"The Renyi Gaussian process: Towards improved generalization","author":"Yue","year":"2019"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/MSP.2020.2975749"},{"key":"ref10","first-page":"634","article-title":"Analyzing federated learning through an adversarial lens","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Bhagoji"},{"key":"ref11","article-title":"Attack of the tails: Yes, you really can backdoor federated learning","author":"Wang","year":"2020"},{"key":"ref12","first-page":"6357","article-title":"Ditto: Fair and robust federated learning through personalization","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Li"},{"key":"ref13","first-page":"1","article-title":"Fair resource allocation in federated learning","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Li"},{"key":"ref14","first-page":"4615","article-title":"Agnostic federated learning","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Mohri"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1287\/ijds.2022.0022"},{"key":"ref16","article-title":"Improving fairness via federated learning","author":"Zeng","year":"2021"},{"key":"ref17","first-page":"16","article-title":"Federated learning with matched averaging","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Wang"},{"key":"ref18","first-page":"5132","article-title":"Scaffold: Stochastic controlled averaging for federated learning","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Karimireddy"},{"key":"ref19","article-title":"Federated accelerated stochastic gradient descent","author":"Yuan","year":"2020"},{"key":"ref20","article-title":"Fed-ensemble: Improving generalization through model ensembling in federated learning","author":"Shi","year":"2021"},{"key":"ref21","first-page":"2883","article-title":"High dimensional Bayesian optimization with elastic Gaussian process","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Rana"},{"key":"ref22","first-page":"2808","article-title":"Why non-myopic Bayesian optimization is promising and how far should we look-ahead? A study via rollout","volume-title":"Proc. Int. Conf. Artif. Intell. Statist.","author":"Yue"},{"key":"ref23","first-page":"4794","article-title":"Binoculars for efficient, nonmyopic sequential experimental design","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Jiang"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1080\/00224065.2021.1930618"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1016\/j.addma.2016.05.009"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.1109\/TR.2016.2635149"},{"key":"ref27","first-page":"19","article-title":"Nested Gaussian process modeling for high-dimensional data imputation in healthcare systems","volume-title":"Proc. Conf. Expo","author":"Imani"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1007\/s10489-020-01889-9"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1109\/IVS.2018.8500614"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2013.218"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/LRA.2020.3010456"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.1214\/ss\/1177012413"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.1080\/01621459.1991.10475138"},{"key":"ref34","article-title":"Federated optimization in heterogeneous networks","author":"Li","year":"2018"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1109\/tsp.2021.3115952"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1073\/pnas.1611835114"},{"key":"ref37","article-title":"Personalized federated learning with Moreau envelopes","author":"Dinh","year":"2020"},{"key":"ref38","article-title":"Federated learning with personalization layers","author":"Arivazhagan","year":"2019"},{"key":"ref39","article-title":"Think locally, act globally: Federated learning with local and global representations","author":"Liang","year":"2020"},{"key":"ref40","article-title":"Federated multi-task learning","author":"Smith","year":"2017"},{"key":"ref41","first-page":"24730","article-title":"Modular Gaussian processes for transfer learning","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Moreno-Mu\u00f1oz"},{"key":"ref42","first-page":"9","article-title":"Distributed variational inference in sparse Gaussian process regression and latent variable models","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Gal"},{"key":"ref43","first-page":"1481","article-title":"Distributed Gaussian processes","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Deisenroth"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1109\/cdc.2012.6426691"},{"key":"ref45","article-title":"Parallelizing MCMC via Weierstrass sampler","author":"Wang","year":"2013"},{"issue":"1","key":"ref46","first-page":"115","article-title":"Communication-efficient sparse regression","volume":"18","author":"Lee","year":"2017","journal-title":"J. Mach. Learn. Res."},{"issue":"1","key":"ref47","first-page":"3202","article-title":"Distributed learning with regularized least squares","volume":"18","author":"Lin","year":"2017","journal-title":"J. Mach. Learn. Res."},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1214\/18-AOS1777"},{"key":"ref49","doi-asserted-by":"publisher","DOI":"10.1080\/01621459.2021.1886937"},{"key":"ref50","doi-asserted-by":"publisher","DOI":"10.1080\/01621459.2021.1891925"},{"key":"ref51","doi-asserted-by":"publisher","DOI":"10.1080\/01621459.2021.1969238"},{"key":"ref52","doi-asserted-by":"publisher","DOI":"10.1109\/JSAC.2020.3041388"},{"key":"ref53","article-title":"Generalized product of experts for automatic and principled fusion of Gaussian process predictions","author":"Cao","year":"2014"},{"key":"ref54","article-title":"Hierarchical mixture-of-experts model for large-scale Gaussian process regression","author":"Ng","year":"2014"},{"key":"ref55","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2019.2906207"},{"key":"ref56","doi-asserted-by":"publisher","DOI":"10.23919\/JCC.2022.01.016"},{"key":"ref57","article-title":"Adam: A method for stochastic optimization","author":"Kingma","year":"2014"},{"key":"ref58","article-title":"On large-batch training for deep learning: Generalization gap and sharp minima","author":"Keskar","year":"2016"},{"key":"ref59","first-page":"8279","article-title":"How SGD selects the global minima in over-parameterized learning: A dynamical stability perspective","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Wu"},{"key":"ref60","article-title":"Stochastic gradient descent in correlated settings: A study on Gaussian processes","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Chen"},{"key":"ref61","article-title":"Gaussian processes for Big Data","author":"Hensman","year":"2013"},{"key":"ref62","doi-asserted-by":"publisher","DOI":"10.1016\/j.spasta.2013.06.003"},{"key":"ref63","first-page":"862","article-title":"Rates of convergence for sparse variational Gaussian process regression","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Burt"},{"key":"ref64","volume-title":"Gaussian Processes for Machine Learning","author":"Williams","year":"2006"},{"key":"ref65","article-title":"On the convergence of fedavg on non-IID data","author":"Li","year":"2019"},{"key":"ref66","doi-asserted-by":"publisher","DOI":"10.2514\/1.J056513"},{"key":"ref67","article-title":"Deep Gaussian processes for multi-fidelity modeling","author":"Cutajar","year":"2019"},{"key":"ref68","doi-asserted-by":"publisher","DOI":"10.1016\/j.ast.2020.106339"},{"key":"ref69","doi-asserted-by":"publisher","DOI":"10.3934\/acse.2023015"},{"key":"ref70","doi-asserted-by":"publisher","DOI":"10.1137\/16M1082469"},{"key":"ref71","doi-asserted-by":"publisher","DOI":"10.1093\/biomet\/87.1.1"},{"key":"ref72","doi-asserted-by":"publisher","DOI":"10.2514\/1.32308"},{"key":"ref73","first-page":"207","article-title":"Deep Gaussian processes","volume-title":"Proc. 16th Int. Conf. Artif. Intell. Statist.","author":"Damianou"},{"key":"ref74","doi-asserted-by":"publisher","DOI":"10.1080\/00401706.2012.723572"},{"key":"ref75","doi-asserted-by":"publisher","DOI":"10.1016\/S0167-9473(00)00057-8"},{"key":"ref76","doi-asserted-by":"publisher","DOI":"10.1098\/rspa.2016.0751"},{"key":"ref77","doi-asserted-by":"publisher","DOI":"10.1080\/00401706.2012.725994"},{"key":"ref78","doi-asserted-by":"publisher","DOI":"10.1080\/00401706.2012.650527"},{"key":"ref79","doi-asserted-by":"publisher","DOI":"10.1109\/ACC.2008.4586493"},{"key":"ref80","doi-asserted-by":"publisher","DOI":"10.1007\/s10339-011-0404-1"},{"key":"ref81","article-title":"Partitioned variational inference: A unified framework encompassing federated and continual learning","author":"Bui","year":"2018"},{"key":"ref82","first-page":"878","article-title":"Turbofan engine degradation simulation data set","volume":"18","author":"Saxena","year":"2008","journal-title":"NASA Ames Prognostics Data Repository"},{"key":"ref83","first-page":"16937","article-title":"Inverting gradients-how easy is it to break privacy in federated learning?","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Geiping"}],"container-title":["IEEE Transactions on Pattern Analysis and Machine Intelligence"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/34\/10522060\/10402074.pdf?arnumber=10402074","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,4,4]],"date-time":"2025-04-04T19:38:58Z","timestamp":1743795538000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10402074\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,6]]},"references-count":83,"journal-issue":{"issue":"6"},"URL":"https:\/\/doi.org\/10.1109\/tpami.2024.3355289","relation":{},"ISSN":["0162-8828","2160-9292","1939-3539"],"issn-type":[{"value":"0162-8828","type":"print"},{"value":"2160-9292","type":"electronic"},{"value":"1939-3539","type":"electronic"}],"subject":[],"published":{"date-parts":[[2024,6]]}}}