{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,31]],"date-time":"2026-03-31T07:22:41Z","timestamp":1774941761521,"version":"3.50.1"},"reference-count":27,"publisher":"IEEE","license":[{"start":{"date-parts":[[2021,7,12]],"date-time":"2021-07-12T00:00:00Z","timestamp":1626048000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2021,7,12]],"date-time":"2021-07-12T00:00:00Z","timestamp":1626048000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100000780","name":"European Union","doi-asserted-by":"publisher","award":["893082"],"award-info":[{"award-number":["893082"]}],"id":[{"id":"10.13039\/501100000780","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/100010663","name":"European Research Council (ERC)","doi-asserted-by":"publisher","award":["725731"],"award-info":[{"award-number":["725731"]}],"id":[{"id":"10.13039\/100010663","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2021,7,12]]},"DOI":"10.1109\/isit45174.2021.9518020","type":"proceedings-article","created":{"date-parts":[[2021,9,1]],"date-time":"2021-09-01T16:52:42Z","timestamp":1630515162000},"page":"1176-1181","source":"Crossref","is-referenced-by-count":1,"title":["Conditional Mutual Information-Based Generalization Bound for Meta Learning"],"prefix":"10.1109","author":[{"given":"Arezou","family":"Rezazadeh","sequence":"first","affiliation":[{"name":"Chalmers University of Technology"}]},{"given":"Sharu Theresa","family":"Jose","sequence":"additional","affiliation":[{"name":"Kings College London"}]},{"given":"Giuseppe","family":"Durisi","sequence":"additional","affiliation":[{"name":"Chalmers University of Technology"}]},{"given":"Osvaldo","family":"Simeone","sequence":"additional","affiliation":[{"name":"Kings College London"}]}],"member":"263","reference":[{"key":"ref10","article-title":"Reasoning about generalization via conditional mutual information","author":"steinke","year":"2020","journal-title":"Proc Conf Learn Theory (COLT)"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1613\/jair.731"},{"key":"ref12","article-title":"A PAC-Bayesian bound for lifelong learning","author":"pentina","year":"2014","journal-title":"Proc Int Conf on Machine Learning (ICML)"},{"key":"ref13","author":"rothfuss","year":"2020","journal-title":"PACOH Bayes-optimal meta-learning with PAC-guarantees"},{"key":"ref14","article-title":"Meta-learning by adjusting priors based on extended PAC-Bayes theory","author":"amit","year":"2018","journal-title":"Proc Int Conf on Machine Learning (ICML)"},{"key":"ref15","first-page":"457","article-title":"Incremental learning-to-learn with statistical guarantees","author":"denevi","year":"0","journal-title":"UAI"},{"key":"ref16","article-title":"Learning-to-learn stochastic gradient descent with biased regularization","author":"denevi","year":"2019","journal-title":"Proc Int Conf on Machine Learning (ICML)"},{"key":"ref17","article-title":"The advantage of conditional meta-learning for biased regularization and fine-tuning","author":"denevi","year":"2020","journal-title":"Proc Conf Neural Inf Process Syst (NeurIPS)"},{"key":"ref18","article-title":"Learning to learn around a common mean","author":"denevi","year":"2018","journal-title":"Proc Conf Neural Inf Process Syst (NeurIPS)"},{"key":"ref19","author":"konobeev","year":"2020","journal-title":"On Optimality of Meta-Learning in Fixed-Design Regression with Weighted Biased Regularization"},{"key":"ref4","author":"li","year":"2017","journal-title":"Meta-SGD Learning to learn quickly for few shot learning"},{"key":"ref27","doi-asserted-by":"crossref","DOI":"10.1017\/9781108627771","author":"wainwright","year":"2019","journal-title":"High-dimensional statistics A non-asymptotic viewpoint"},{"key":"ref3","article-title":"Model-agnostic meta-learning for fast adaptation of deep networks","author":"finn","year":"2017","journal-title":"Proc Int Conf Machine Learning (ICML)"},{"key":"ref6","article-title":"Controlling bias in adaptive data analysis using information theory","author":"russo","year":"2016","journal-title":"Proc Artif Intell Statist (AISTATS)"},{"key":"ref5","article-title":"Information-theoretic analysis of generalization capability of learning algorithms","author":"xu","year":"2017","journal-title":"Proc Conf Neural Inf Process Syst (NeurIPS)"},{"key":"ref8","article-title":"Information-theoretic generalization bounds for SGLD via data-dependent estimates","author":"negrea","year":"2019","journal-title":"Proc Conf Neural Inf Process Syst (NeurIPS)"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/ISIT.2019.8849590"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1007\/978-1-4615-5529-2_1"},{"key":"ref9","author":"jose","year":"2020","journal-title":"Information-theoretic generalization bounds for meta-learning and applications"},{"key":"ref1","author":"schmidhuber","year":"1987","journal-title":"Evolutionary principles in self-referential learning or on learning how to learn the meta-meta-&#x2026; hook"},{"key":"ref20","article-title":"Provable guarantees for gradient-based meta-learning","author":"balcan","year":"2019","journal-title":"Proc Int Conf Machine Learning (ICML)"},{"key":"ref22","article-title":"Understanding deep learning requires rethinking generalization","author":"zhang","year":"2017","journal-title":"Proc Int Conf Learning Representations (ICLR) 2017"},{"key":"ref21","article-title":"Adaptive gradient-based meta-learning methods","author":"khodak","year":"2019","journal-title":"Proc Conf Neural Inf Process Syst (NeurIPS)"},{"key":"ref24","author":"polyanskiy","year":"2019","journal-title":"Lecture Notes on Information Theory MIT (6 441) UIUC (ECE 563) Yale (STAT 664) 2012&#x2013;2017"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1109\/JSAIT.2020.3040992"},{"key":"ref26","article-title":"Nonvacuous loss bounds with fast rates for neural networks via conditional information measures","author":"hellstr\u00f6m","year":"2021","journal-title":"Proc IEEE Int Symp Inf Theory (ISIT)"},{"key":"ref25","article-title":"On the theory of transfer learning: The importance of task diversity","author":"tripuraneni","year":"2020","journal-title":"Proc Conf Neural Inf Process Syst (NeurIPS)"}],"event":{"name":"2021 IEEE International Symposium on Information Theory (ISIT)","location":"Melbourne, Australia","start":{"date-parts":[[2021,7,12]]},"end":{"date-parts":[[2021,7,20]]}},"container-title":["2021 IEEE International Symposium on Information Theory (ISIT)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/9517708\/9517709\/09518020.pdf?arnumber=9518020","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,3,12]],"date-time":"2026-03-12T20:34:35Z","timestamp":1773347675000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9518020\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021,7,12]]},"references-count":27,"URL":"https:\/\/doi.org\/10.1109\/isit45174.2021.9518020","relation":{},"subject":[],"published":{"date-parts":[[2021,7,12]]}}}