{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,6,10]],"date-time":"2025-06-10T05:05:37Z","timestamp":1749531937665,"version":"3.40.3"},"publisher-location":"Singapore","reference-count":18,"publisher":"Springer Nature Singapore","isbn-type":[{"type":"print","value":"9789819757787"},{"type":"electronic","value":"9789819757794"}],"license":[{"start":{"date-parts":[[2025,1,1]],"date-time":"2025-01-01T00:00:00Z","timestamp":1735689600000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2025,1,1]],"date-time":"2025-01-01T00:00:00Z","timestamp":1735689600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025]]},"DOI":"10.1007\/978-981-97-5779-4_27","type":"book-chapter","created":{"date-parts":[[2025,1,10]],"date-time":"2025-01-10T07:16:25Z","timestamp":1736493385000},"page":"406-415","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":1,"title":["LongSum: An Efficient Transformer for Long Document Summarization"],"prefix":"10.1007","author":[{"given":"Jitong","family":"Wei","sequence":"first","affiliation":[]},{"given":"Yang","family":"Gao","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2025,1,11]]},"reference":[{"key":"27_CR1","unstructured":"Beltagy, I., Peters, M.E., Cohan, A.: Longformer: The long-document transformer. arXiv preprint arXiv:2004.05150 (2020)"},{"key":"27_CR2","doi-asserted-by":"crossref","unstructured":"Cao, S., Wang, L.: Hibrids: Attention with hierarchical biases for structure-aware long document summarization. In: Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers). pp. 786\u2013807 (2022)","DOI":"10.18653\/v1\/2022.acl-long.58"},{"key":"27_CR3","doi-asserted-by":"crossref","unstructured":"Cohan, A., Dernoncourt, F., Kim, D.S., Bui, T., Kim, S., Chang, W., Goharian, N.: A discourse-aware attention model for abstractive summarization of long documents. In: Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 2 (Short Papers). pp. 615\u2013621 (2018)","DOI":"10.18653\/v1\/N18-2097"},{"key":"27_CR4","doi-asserted-by":"publisher","first-page":"3029","DOI":"10.1109\/TASLP.2020.3037401","volume":"28","author":"A Gidiotis","year":"2020","unstructured":"Gidiotis, A., Tsoumakas, G.: A divide-and-conquer approach to the summarization of long documents. IEEE\/ACM Transactions on Audio, Speech, and Language Processing 28, 3029\u20133040 (2020)","journal-title":"IEEE\/ACM Transactions on Audio, Speech, and Language Processing"},{"key":"27_CR5","doi-asserted-by":"crossref","unstructured":"Guo, M., Ainslie, J., Uthus, D.C., Ontanon, S., Ni, J., Sung, Y.H., Yang, Y.: Longt5: Efficient text-to-text transformer for long sequences. In: Findings of the Association for Computational Linguistics: NAACL 2022. pp. 724\u2013736 (2022)","DOI":"10.18653\/v1\/2022.findings-naacl.55"},{"key":"27_CR6","doi-asserted-by":"crossref","unstructured":"Huang, L., Cao, S., Parulian, N., Ji, H., Wang, L.: Efficient attentions for long document summarization. In: Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies. pp. 1419\u20131436 (2021)","DOI":"10.18653\/v1\/2021.naacl-main.112"},{"key":"27_CR7","doi-asserted-by":"crossref","unstructured":"Lewis, M., Liu, Y., Goyal, N., Ghazvininejad, M., Mohamed, A., Levy, O., Stoyanov, V., Zettlemoyer, L.: Bart: Denoising sequence-to-sequence pre-training for natural language generation, translation, and comprehension. In: Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics. pp. 7871\u20137880 (2020)","DOI":"10.18653\/v1\/2020.acl-main.703"},{"key":"27_CR8","doi-asserted-by":"crossref","unstructured":"Manakul, P., Gales, M.: Long-span summarization via local attention and content selection. In: Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing (Volume 1: Long Papers). pp. 6026\u20136041 (2021)","DOI":"10.18653\/v1\/2021.acl-long.470"},{"key":"27_CR9","doi-asserted-by":"crossref","unstructured":"Narayan, S., Cohen, S.B., Lapata, M.: Ranking sentences for extractive summarization with reinforcement learning. In: Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long Papers). pp. 1747\u20131759 (2018)","DOI":"10.18653\/v1\/N18-1158"},{"key":"27_CR10","doi-asserted-by":"crossref","unstructured":"Pang, B., Nijkamp, E., Kry\u015bci\u0144ski, W., Savarese, S., Zhou, Y., Xiong, C.: Long document summarization with top-down and bottom-up inference. In: Findings of the Association for Computational Linguistics: EACL 2023. pp. 1267\u20131284 (2023)","DOI":"10.18653\/v1\/2023.findings-eacl.94"},{"key":"27_CR11","doi-asserted-by":"crossref","unstructured":"Pilault, J., Li, R., Subramanian, S., Pal, C.: On extractive and abstractive neural document summarization with transformer language models. In: Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP). pp. 9308\u20139319 (2020)","DOI":"10.18653\/v1\/2020.emnlp-main.748"},{"key":"27_CR12","unstructured":"Qin, Z., Sun, W., Deng, H., Li, D., Wei, Y., Lv, B., Yan, J., Kong, L., Zhong, Y.: cosformer: Rethinking softmax in attention. In: International Conference on Learning Representations (2021)"},{"key":"27_CR13","doi-asserted-by":"crossref","unstructured":"Qiu, J., Ma, H., Levy, O., Yih, W.t., Wang, S., Tang, J.: Blockwise self-attention for long document understanding. In: Findings of the Association for Computational Linguistics: EMNLP 2020. pp. 2555\u20132565 (2020)","DOI":"10.18653\/v1\/2020.findings-emnlp.232"},{"issue":"140","key":"27_CR14","first-page":"1","volume":"21","author":"C Raffel","year":"2020","unstructured":"Raffel, C., Shazeer, N., Roberts, A., Lee, K., Narang, S., Matena, M., Zhou, Y., Li, W., Liu, P.J.: Exploring the limits of transfer learning with a unified text-to-text transformer. Journal of machine learning research 21(140), 1\u201367 (2020)","journal-title":"Journal of machine learning research"},{"key":"27_CR15","unstructured":"Tay, Y., Dehghani, M., Bahri, D., Metzler, D.: Efficient transformers: A survey. ACM Computing Surveys (CSUR) (2020)"},{"key":"27_CR16","unstructured":"Vaswani, A., Shazeer, N., Parmar, N., Uszkoreit, J., Jones, L., Gomez, A.N., Kaiser, \u0141., Polosukhin, I.: Attention is all you need. Advances in neural information processing systems 30 (2017)"},{"key":"27_CR17","unstructured":"Wang, S., Li, B.Z., Khabsa, M., Fang, H., Ma, H.: Linformer: Self-attention with linear complexity. arXiv preprint arXiv:2006.04768 (2020)"},{"key":"27_CR18","doi-asserted-by":"crossref","unstructured":"Zheng, H., Lapata, M.: Sentence centrality revisited for unsupervised summarization. In: Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics. pp. 6236\u20136247 (2019)","DOI":"10.18653\/v1\/P19-1628"}],"container-title":["Lecture Notes in Computer Science","Database Systems for Advanced Applications"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/978-981-97-5779-4_27","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,1,10]],"date-time":"2025-01-10T08:10:12Z","timestamp":1736496612000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/978-981-97-5779-4_27"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025]]},"ISBN":["9789819757787","9789819757794"],"references-count":18,"URL":"https:\/\/doi.org\/10.1007\/978-981-97-5779-4_27","relation":{},"ISSN":["0302-9743","1611-3349"],"issn-type":[{"type":"print","value":"0302-9743"},{"type":"electronic","value":"1611-3349"}],"subject":[],"published":{"date-parts":[[2025]]},"assertion":[{"value":"11 January 2025","order":1,"name":"first_online","label":"First Online","group":{"name":"ChapterHistory","label":"Chapter History"}},{"value":"DASFAA","order":1,"name":"conference_acronym","label":"Conference Acronym","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"International Conference on Database Systems for Advanced Applications","order":2,"name":"conference_name","label":"Conference Name","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Gifu","order":3,"name":"conference_city","label":"Conference City","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Japan","order":4,"name":"conference_country","label":"Conference Country","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"2024","order":5,"name":"conference_year","label":"Conference Year","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"2 July 2024","order":7,"name":"conference_start_date","label":"Conference Start Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"5 July 2024","order":8,"name":"conference_end_date","label":"Conference End Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"15","order":9,"name":"conference_number","label":"Conference Number","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"dasfaa2024a","order":10,"name":"conference_id","label":"Conference ID","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"https:\/\/www.dasfaa2024.org\/","order":11,"name":"conference_url","label":"Conference URL","group":{"name":"ConferenceInfo","label":"Conference Information"}}]}}