{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,13]],"date-time":"2026-03-13T04:43:46Z","timestamp":1773377026902,"version":"3.50.1"},"reference-count":25,"publisher":"IEEE","license":[{"start":{"date-parts":[[2025,6,22]],"date-time":"2025-06-22T00:00:00Z","timestamp":1750550400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,6,22]],"date-time":"2025-06-22T00:00:00Z","timestamp":1750550400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025,6,22]]},"DOI":"10.1109\/isit63088.2025.11195231","type":"proceedings-article","created":{"date-parts":[[2025,10,20]],"date-time":"2025-10-20T17:48:08Z","timestamp":1760982488000},"page":"1-6","source":"Crossref","is-referenced-by-count":1,"title":["Model Non-Collapse: Minimax Bounds for Recursive Discrete Distribution Estimation"],"prefix":"10.1109","author":[{"given":"Millen","family":"Kanabar","sequence":"first","affiliation":[{"name":"School of Computer and Communication Sciences,EPFL,Switzerland"}]},{"given":"Michael","family":"Gastpar","sequence":"additional","affiliation":[{"name":"School of Computer and Communication Sciences,EPFL,Switzerland"}]}],"member":"263","reference":[{"key":"ref1","first-page":"1066","article-title":"On Learning Distributions from their Samples","volume-title":"Proceedings of The 28th Conference on Learning Theory","author":"Kamath","year":"2015"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1137\/1.9781611973105.100"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/TIT.2015.2478816"},{"key":"ref4","volume-title":"This newspaper doesn\u2019t exist: How ChatGPT can launch fake news sites in minutes.","author":"Mahadevan","year":"2023"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1038\/s41586-024-07566-y"},{"key":"ref6","author":"Alemohammad","year":"2023","journal-title":"Self-Consuming Generative Models Go MAD"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/ISIT63088.2025.11195231"},{"key":"ref8","article-title":"How bad is training on synthetic data? a statistical analysis of language model collapse","volume-title":"First Conference on Language Modeling, 2024","author":"Seddik"},{"key":"ref9","article-title":"Rate of model collapse in recursive training","volume-title":"The 28th International Conference on Artificial Intelligence and Statistics","author":"Suresh","year":"2025"},{"key":"ref10","article-title":"On the stability of iterative retraining of generative models on their own data","volume-title":"The Twelfth International Conference on Learning Representations","author":"Bertrand","year":"2024"},{"key":"ref11","article-title":"Towards theoretical understandings of self-consuming generative models","volume-title":"Forty-first International Conference on Machine Learning","author":"Fu","year":"2024"},{"key":"ref12","article-title":"Model collapse demystified: The case of regression","volume-title":"The Thirty-eighth Annual Conference on Neural Information Processing Systems","author":"Dohmatob","year":"2024"},{"key":"ref13","article-title":"Is model collapse inevitable? breaking the curse of recursion by accumulating real and synthetic data","volume-title":"First Conference on Language Modeling","author":"Gerstgrasser","year":"2024"},{"key":"ref14","article-title":"Beyond model collapse: Scaling up with synthesized data requires reinforcement","volume-title":"ICML 2024 Workshop on Theoretical Foundations of Foundation Models","author":"Feng","year":"2024"},{"key":"ref15","author":"Dohmatob","year":"2024","journal-title":"Strong Model Collapse"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/CDC56724.2024.10886816"},{"key":"ref17","author":"Dey","year":"2024","journal-title":"Universality of the \u03c02 \/ 6 Pathway in Avoiding Model Collapse"},{"key":"ref18","author":"Amin","year":"2025","journal-title":"Escaping Collapse: The Strength of Weak Data for Large Language Model Training"},{"key":"ref19","author":"Fu","year":"2025","journal-title":"A Theoretical Perspective: How to Prevent Model Collapse in Self-consuming Training Loops"},{"key":"ref20","author":"Kazdan","year":"2025","journal-title":"Collapse or Thrive? Perils and Promises of Synthetic Data in a Self-Generating World"},{"key":"ref21","author":"He","year":"2025","journal-title":"Golden Ratio Weighting Prevents Model Collapse"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1109\/TIT.2008.928987"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1007\/b13794"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1007\/978-1-4612-1880-7_29"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1017\/9781108966351"}],"event":{"name":"2025 IEEE International Symposium on Information Theory (ISIT)","location":"Ann Arbor, MI, USA","start":{"date-parts":[[2025,6,22]]},"end":{"date-parts":[[2025,6,27]]}},"container-title":["2025 IEEE International Symposium on Information Theory (ISIT)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/11195206\/11195207\/11195231.pdf?arnumber=11195231","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,3,12]],"date-time":"2026-03-12T20:30:08Z","timestamp":1773347408000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11195231\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,6,22]]},"references-count":25,"URL":"https:\/\/doi.org\/10.1109\/isit63088.2025.11195231","relation":{},"subject":[],"published":{"date-parts":[[2025,6,22]]}}}