{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,25]],"date-time":"2026-03-25T06:16:55Z","timestamp":1774419415179,"version":"3.50.1"},"reference-count":22,"publisher":"IEEE","license":[{"start":{"date-parts":[[2025,4,6]],"date-time":"2025-04-06T00:00:00Z","timestamp":1743897600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,4,6]],"date-time":"2025-04-06T00:00:00Z","timestamp":1743897600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025,4,6]]},"DOI":"10.1109\/icassp49660.2025.10890215","type":"proceedings-article","created":{"date-parts":[[2025,3,12]],"date-time":"2025-03-12T17:15:02Z","timestamp":1741799702000},"page":"1-5","source":"Crossref","is-referenced-by-count":0,"title":["Found In The Distribution: Utilizing Latent Dirichlet Allocation Improves Long Context Comprehension of Large Language Models"],"prefix":"10.1109","author":[{"given":"Zhenyu","family":"Guan","sequence":"first","affiliation":[{"name":"Renmin University of China,CHN,School of Information,Beijing,China"}]},{"given":"Xun","family":"Liang","sequence":"additional","affiliation":[{"name":"Renmin University of China,CHN,School of Information,Beijing,China"}]},{"given":"Sensen","family":"Zhang","sequence":"additional","affiliation":[{"name":"Renmin University of China,CHN,School of Information,Beijing,China"}]}],"member":"263","reference":[{"key":"ref1","article-title":"Lamda: Language models for dialog applications","author":"Thoppilan","year":"2022","journal-title":"CoRR"},{"key":"ref2","article-title":"SILO language models: Isolating legal risk in a nonparametric datastore","volume-title":"The Twelfth International Conference on Learning Representations, ICLR 2024, Vienna, Austria, May 7-11, 2024","author":"Min"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2021.eacl-main.74"},{"key":"ref4","article-title":"Tool documentation enables zero-shot tool-usage with large language models","author":"Hsieh","year":"2023","journal-title":"CoRR"},{"key":"ref5","article-title":"Self-rag: Learning to retrieve, generate, and critique through self-reflection","volume-title":"The Twelfth International Conference on Learning Representations, ICLR 2024, Vienna, Austria, May 7-11, 2024","author":"Asai"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1612"},{"key":"ref7","article-title":"Chain-of-table: Evolving tables in the reasoning chain for table understanding","volume-title":"The Twelfth International Conference on Learning Representations, ICLR 2024, Vienna, Austria, May 7-11, 2024","author":"Wang"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.emnlp-main.550"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1162\/tacl_a_00638"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2024.findings-acl.890"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/n18-2097"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.3115\/1073083.1073135"},{"key":"ref13","first-page":"74","article-title":"ROUGE: A package for automatic evaluation of summaries","volume-title":"Text Summarization Branches Out","author":"Lin","year":"2004"},{"key":"ref14","first-page":"2","article-title":"Bert: Pre-training of deep bidirectional transformers for language understanding","volume-title":"Proceedings of naacL-HLT","volume":"1","author":"Ming-Wei Chang Kenton"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1525\/9780520940420-020"},{"key":"ref16","first-page":"140:1","article-title":"Exploring the limits of transfer learning with a unified text-to-text transformer","volume":"21","author":"Raffel","year":"2020","journal-title":"J. Mach. Learn. Res"},{"key":"ref17","first-page":"11328","article-title":"PEGASUS: pre-training with extracted gap-sentences for abstractive summarization","volume-title":"Proceedings of the 37th International Conference on Machine Learning, ICML 2020, 13-18 July 2020","volume":"119","author":"Zhang"},{"key":"ref18","article-title":"Roberta: A robustly optimized BERT pretraining approach","author":"Liu","year":"2019","journal-title":"CoRR"},{"key":"ref19","first-page":"21","article-title":"Towards human-centered summarization: A case study on financial news","volume-title":"Proceedings of the First Workshop on Bridging Human\u2013Computer Interaction and Natural Language Processing","author":"Passali"},{"key":"ref20","article-title":"Evaluation of faithfulness using the longest supported subsequence","author":"Mittal","year":"2023","journal-title":"CoRR"},{"key":"ref21","article-title":"Big bird: Transformers for longer sequences","volume-title":"Advances in Neural Information Processing Systems 33: Annual Conference on Neural Information Processing Systems 2020, NeurIPS 2020, December 6-12, 2020","author":"Zaheer"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v38i21.30433"}],"event":{"name":"ICASSP 2025 - 2025 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)","location":"Hyderabad, India","start":{"date-parts":[[2025,4,6]]},"end":{"date-parts":[[2025,4,11]]}},"container-title":["ICASSP 2025 - 2025 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/10887540\/10887541\/10890215.pdf?arnumber=10890215","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,3,25]],"date-time":"2026-03-25T05:22:46Z","timestamp":1774416166000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10890215\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,4,6]]},"references-count":22,"URL":"https:\/\/doi.org\/10.1109\/icassp49660.2025.10890215","relation":{},"subject":[],"published":{"date-parts":[[2025,4,6]]}}}