{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,13]],"date-time":"2026-03-13T04:40:28Z","timestamp":1773376828341,"version":"3.50.1"},"reference-count":48,"publisher":"IEEE","license":[{"start":{"date-parts":[[2024,7,7]],"date-time":"2024-07-07T00:00:00Z","timestamp":1720310400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,7,7]],"date-time":"2024-07-07T00:00:00Z","timestamp":1720310400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024,7,7]]},"DOI":"10.1109\/isit57864.2024.10619654","type":"proceedings-article","created":{"date-parts":[[2024,8,19]],"date-time":"2024-08-19T13:25:01Z","timestamp":1724073901000},"page":"2682-2687","source":"Crossref","is-referenced-by-count":0,"title":["Data-Dependent Generalization Bounds via Variable-Size Compressibility"],"prefix":"10.1109","author":[{"given":"Milad","family":"Sefidgaran","sequence":"first","affiliation":[{"name":"Paris Research Center, Huawei Technologies,France"}]},{"given":"Abdellatif","family":"Zaidi","sequence":"additional","affiliation":[{"name":"Universit\u00e9 Gustave Eiffel,France"}]}],"member":"263","reference":[{"key":"ref1","first-page":"09","article-title":"Controlling bias in adaptive data analysis using information theory","volume-title":"Proceedings of the 19th International Conference on Artificial Intelligence and Statistics, ser. Proceedings of Machine Learning Research","volume":"51","author":"Russo"},{"key":"ref2","article-title":"Information-theoretic analysis of generalization capability of learning algorithms","volume":"30","author":"Xu","year":"2017","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref3","first-page":"09","article-title":"Reasoning about generalization via con-ditional mutual information","volume-title":"Proceedings of Thirty Third Conference on Learning Theory, ser. Proceedings of Machine Learning Research","volume":"125","author":"Steinke"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/tit.2021.3085190"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/TIT.2022.3144615"},{"key":"ref6","first-page":"10108","article-title":"A new family of generalization bounds using samplewise evaluated cmi","volume":"35","author":"Hellstr\u00f6m","year":"2022","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/TIT.2023.3268527"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/ISIT.2018.8437571"},{"key":"ref9","volume-title":"Information-theoretic generalization bounds for sgld via data-dependent estimates","author":"Negrea","year":"2020"},{"key":"ref10","article-title":"Relating data compression and learn-ability","author":"Littlestone","year":"1986","journal-title":"Citeseer"},{"key":"ref11","first-page":"254","article-title":"Stronger generalization bounds for deep nets via a compression approach","volume-title":"International Conference on Machine Learning","author":"Arora"},{"key":"ref12","first-page":"4416","article-title":"Rate-distortion theoretic generalization bounds for stochastic learning algorithms","volume-title":"Conference on Learning Theory","author":"Sefidgaran"},{"key":"ref13","first-page":"5138","article-title":"Haus-dorff dimension, heavy tails, and generalization in neural networks","volume-title":"Advances in Neural Information Processing Systems","volume":"33","author":"\u015eim\u015fekli","year":"2020"},{"key":"ref14","article-title":"Intrinsic dimension, persistent homology and generalization in neural networks","author":"Birdal","year":"2021","journal-title":"Advances in Neural Information Processing Systems (NeurIPS)"},{"key":"ref15","first-page":"8774","article-title":"Gener-alization bounds using lower tail exponents in stochastic optimizers","volume-title":"International Conference on Machine Learning","author":"Hodgkinson"},{"key":"ref16","first-page":"26590","article-title":"Chaotic regularization and heavy-tailed limits for deterministic gradient descent","volume":"35","author":"Lim","year":"2022","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref17","first-page":"18774","article-title":"Fractal structure and generalization properties of stochastic optimization algorithms","volume":"34","author":"Camuto","year":"2021","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1145\/279943.279989"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1561\/2200000100"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1109\/tit.2024.3414266"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-540-72927-3_10"},{"key":"ref22","article-title":"Pac-bayesian supervised classification","volume":"1277","author":"Catoni","year":"2007","journal-title":"Lecture Notes-Monograph. Series. IMS"},{"key":"ref23","first-page":"663","article-title":"Limitations of information-theoretic gen-eralization bounds for gradient descent methods in stochastic convex optimization","volume-title":"International Conference on Algorithmic Learning Theory","author":"Haghifam"},{"key":"ref24","article-title":"Information theoretic lower bounds for information theoretic upper bounds","author":"Livni","year":"2023","journal-title":"arXiv preprint"},{"key":"ref25","first-page":"16833","article-title":"Pac-bayes analysis beyond the usual bounds","volume":"33","author":"Rivasplata","year":"2020","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref26","article-title":"A pac-bayes bound for deterministic classifiers","author":"Clerico","year":"2022","journal-title":"arXiv preprint"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.1145\/307400.307435"},{"issue":"Oct","key":"ref28","first-page":"233","article-title":"Pac-bayesian generalisation error bounds for gaussian process classification","volume":"3","author":"Seeger","year":"2002","journal-title":"Journal of machine learning research"},{"key":"ref29","article-title":"A pac-bayesian approach to adaptive classification","volume":"840","author":"Catoni","year":"2003","journal-title":"preprint"},{"key":"ref30","article-title":"A note on the pac bayesian theorem","author":"Maurer","year":"2004","journal-title":"arXiv preprint cs\/0411099"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1145\/1553374.1553419"},{"key":"ref32","article-title":"Pac-bayes-empirical-bernstein inequal-ity","volume":"26","author":"Tolstikhin","year":"2013","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref33","first-page":"466","article-title":"A strongly quasi-convex pac-bayesian bound","volume-title":"International Conference on Algorithmic Learning Theory","author":"Thiemann"},{"key":"ref34","first-page":"435","article-title":"Pac-bayesian bounds based on the r\u00e9nyi divergence","volume-title":"Artificial Intelligence and Statistics","author":"B\u00e9gin","year":"2016"},{"key":"ref35","article-title":"A general framework for the disintegration of pac-bayesian bounds","author":"Viallard","year":"2021","journal-title":"arXiv preprint"},{"key":"ref36","volume-title":"Bounds for averaging classifiers","author":"Langford","year":"2001"},{"key":"ref37","article-title":"Pac-bayes & margins","volume":"15","author":"Langford","year":"2002","journal-title":"Advances in neural information processing systems"},{"key":"ref38","first-page":"3709","article-title":"On margins and derandomisation in pac-bayes","volume-title":"International Conference on Artificial Intelligence and Statistics","author":"Biggs"},{"key":"ref39","first-page":"4772","article-title":"Catastrophic fisher explosion: Early phase fisher matrix impacts generalization","volume-title":"International Conference on Machine Learning","author":"Jastrzebski"},{"issue":"1","key":"ref40","first-page":"7479","article-title":"Implicit self-regularization in deep neural networks: Evidence from random matrix theory and implications for learning","volume":"22","author":"Martin","year":"2021","journal-title":"The Journal of Machine Learning Research"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.1109\/TIT.2012.2186786"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.1109\/TIT.2019.2919718"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1109\/ISIT.2016.7541665"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1109\/TIT.2019.2922186"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.1007\/BF02063299"},{"key":"ref46","article-title":"Universal compressed sensing of markov sources","author":"Jalali","year":"2014","journal-title":"arXiv preprint"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.1109\/TIT.2018.2806219"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1109\/ISIT50566.2022.9834845"}],"event":{"name":"2024 IEEE International Symposium on Information Theory (ISIT)","location":"Athens, Greece","start":{"date-parts":[[2024,7,7]]},"end":{"date-parts":[[2024,7,12]]}},"container-title":["2024 IEEE International Symposium on Information Theory (ISIT)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/10619013\/10619074\/10619654.pdf?arnumber=10619654","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,3,12]],"date-time":"2026-03-12T20:28:35Z","timestamp":1773347315000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10619654\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,7,7]]},"references-count":48,"URL":"https:\/\/doi.org\/10.1109\/isit57864.2024.10619654","relation":{},"subject":[],"published":{"date-parts":[[2024,7,7]]}}}