{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,6,20]],"date-time":"2025-06-20T04:09:32Z","timestamp":1750392572629,"version":"3.41.0"},"publisher-location":"Singapore","reference-count":23,"publisher":"Springer Nature Singapore","isbn-type":[{"value":"9789819682942","type":"print"},{"value":"9789819682959","type":"electronic"}],"license":[{"start":{"date-parts":[[2025,1,1]],"date-time":"2025-01-01T00:00:00Z","timestamp":1735689600000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2025,1,1]],"date-time":"2025-01-01T00:00:00Z","timestamp":1735689600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025]]},"DOI":"10.1007\/978-981-96-8295-9_15","type":"book-chapter","created":{"date-parts":[[2025,6,19]],"date-time":"2025-06-19T17:47:16Z","timestamp":1750355236000},"page":"210-222","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":0,"title":["CNO-Former: Chaotic Neural Oscillatory Transformer for Social Media Text Generation"],"prefix":"10.1007","author":[{"given":"Nuobei","family":"Shi","sequence":"first","affiliation":[]},{"given":"Jiapeng","family":"Yu","sequence":"additional","affiliation":[]},{"given":"Hengyi","family":"Luo","sequence":"additional","affiliation":[]},{"given":"Jin","family":"Ma","sequence":"additional","affiliation":[]},{"given":"Raymond","family":"Lee","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2025,6,20]]},"reference":[{"key":"15_CR1","unstructured":"Wang, S., Li, B.Z., Khabsa, M., Fang, H., Ma, H.: Linformer: self-attention with linear complexity. arXiv preprint arXiv:2006.04768 (2020)"},{"key":"15_CR2","unstructured":"Choromanski, K., et al.: Rethinking attention with performers. arXiv preprint arXiv:2009.14794 (2020)"},{"key":"15_CR3","unstructured":"Kitaev, N., Kaiser, \u0141., Levskaya, A.: Reformer: the efficient transformer. arXiv preprint arXiv:2001.04451 (2020)"},{"key":"15_CR4","unstructured":"Beltagy, I., Peters, M.E., Cohan, A.: Longformer: the long-document transformer. arXiv preprint arXiv:2004.05150 (2020)"},{"key":"15_CR5","doi-asserted-by":"publisher","first-page":"731","DOI":"10.1109\/TFUZZ.2019.2914642","volume":"28","author":"RST Lee","year":"2020","unstructured":"Lee, R.S.T.: Chaotic type-2 transient-fuzzy deep neuro-oscillatory network for worldwide financial prediction. IEEE Trans. Fuzzy Syst. 28, 731\u2013745 (2020)","journal-title":"IEEE Trans. Fuzzy Syst."},{"key":"15_CR6","doi-asserted-by":"crossref","unstructured":"Li, J., Song, H., Li, J.: Transformer-based question text generation in the learning system. In: Proceedings of the 6th International Conference on Innovation in Artificial Intelligence, pp. 50\u201356. ACM, New York (2022)","DOI":"10.1145\/3529466.3529484"},{"key":"15_CR7","doi-asserted-by":"crossref","unstructured":"Papineni, K., Roukos, S., Ward, T., Zhu, W.-J.: BLEU: a method for automatic evaluation of machine translation. In: Proceedings of the 40th Annual Meeting of the Association for Computational Linguistics, pp. 311\u2013318. ACL, Morristown (2001)","DOI":"10.3115\/1073083.1073135"},{"key":"15_CR8","doi-asserted-by":"crossref","unstructured":"Zhang, H., Song, H., Li, S., Zhou, M., Song, D.: A survey of controllable text generation using transformer-based pre-trained language models. arXiv preprint arXiv:2201.05337 (2022)","DOI":"10.1145\/3617680"},{"key":"15_CR9","unstructured":"Li, J., Tang, T., Zhao, W.X., Nie, J., Wen, J.: Pretrained language models for text generation: a survey. arXiv preprint arXiv:2201.05273 (2022)"},{"key":"15_CR10","unstructured":"Chung, J., Gulcehre, C., Cho, K., Bengio, Y.: Empirical evaluation of gated recurrent neural networks on sequence modeling. arXiv preprint arXiv:1412.3555 (2014)"},{"key":"15_CR11","doi-asserted-by":"publisher","first-page":"1735","DOI":"10.1162\/neco.1997.9.8.1735","volume":"9","author":"S Hochreiter","year":"1997","unstructured":"Hochreiter, S., Schmidhuber, J.: Long short-term memory. Neural Comput. 9, 1735\u20131780 (1997)","journal-title":"Neural Comput."},{"key":"15_CR12","doi-asserted-by":"publisher","first-page":"2515","DOI":"10.1016\/j.jksuci.2020.04.001","volume":"34","author":"T Iqbal","year":"2022","unstructured":"Iqbal, T., Qureshi, S.: The survey: text generation models in deep learning. J. King Saud Univ. Comput. Inf. Sci. 34, 2515\u20132528 (2022)","journal-title":"J. King Saud Univ. Comput. Inf. Sci."},{"key":"15_CR13","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1145\/3530811","volume":"55","author":"Y Tay","year":"2023","unstructured":"Tay, Y., Dehghani, M., Bahri, D., Metzler, D.: Efficient transformers: a survey. ACM Comput. Surv. 55, 1\u201328 (2023)","journal-title":"ACM Comput. Surv."},{"key":"15_CR14","unstructured":"Zaheer, M., et al.: Big bird: transformers for longer sequences. In: Advances in Neural Information Processing Systems (2020). arXiv:2007.14062"},{"key":"15_CR15","unstructured":"Ho, J., Kalchbrenner, N., Weissenborn, D., Salimans, T.: Axial attention in multidimensional transformers. arXiv preprint arXiv:1912.12180 (2019)"},{"key":"15_CR16","unstructured":"Tay, Y., Bahri, D., Metzler, D., Juan, D.-C., Zhao, Z., Zheng, C.: Synthesizer: rethinking self-attention in transformer models. arXiv preprint arXiv:2005.00743 (2020)"},{"key":"15_CR17","unstructured":"Rae, J.W., Potapenko, A., Jayakumar, S.M., Lillicrap, T.P.: Compressive transformers for long-range sequence modelling. arXiv preprint arXiv:1911.05507 (2019)"},{"key":"15_CR18","unstructured":"Lee, J., Lee, Y., Kim, J., Kosiorek, A.R., Choi, S., Teh, Y.W.: Set transformer: a framework for attention-based permutation-invariant neural networks. arXiv:1810.00825 (2018)"},{"key":"15_CR19","unstructured":"Vaswani, A., et al.: Attention is all you need. In: Proceedings of the 31st International Conference on Neural Information Processing Systems, pp. 6000\u20136010 (2017)"},{"key":"15_CR20","doi-asserted-by":"publisher","first-page":"1228","DOI":"10.1109\/TNN.2004.832729","volume":"15","author":"RST Lee","year":"2004","unstructured":"Lee, R.S.T.: A transient-chaotic autoassociative network (TCAN) based on lee oscillators. IEEE Trans. Neural Netw. 15, 1228\u20131243 (2004)","journal-title":"IEEE Trans. Neural Netw."},{"key":"15_CR21","unstructured":"Li, Y., Su, H., Shen, X., Li, W., Cao, Z., Niu, S.: DailyDialog: a manually labelled multi-turn dialogue dataset. arXiv preprint arXiv:1710.03957 (2017)"},{"key":"15_CR22","unstructured":"Dinan, E., Roller, S., Shuster, K., Fan, A., Auli, M., Weston, J.: Wizard of Wikipedia: knowledge-powered conversational agents. In: 7th International Conference on Learning Representations (ICLR 2019) (2018). arXiv:1811.01241"},{"key":"15_CR23","doi-asserted-by":"crossref","unstructured":"Ren, D., Cai, Y., Chan, W.H., Li, Z.: A Clustering Based Adaptive Sequence-to-Sequence Model for Dialogue Systems. In: 2018 IEEE International Conference on Big Data and Smart Computing, pp. 775\u2013781 (2018)","DOI":"10.1109\/BigComp.2018.00148"}],"container-title":["Lecture Notes in Computer Science","Data Science: Foundations and Applications"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/978-981-96-8295-9_15","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,19]],"date-time":"2025-06-19T17:47:23Z","timestamp":1750355243000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/978-981-96-8295-9_15"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025]]},"ISBN":["9789819682942","9789819682959"],"references-count":23,"URL":"https:\/\/doi.org\/10.1007\/978-981-96-8295-9_15","relation":{},"ISSN":["0302-9743","1611-3349"],"issn-type":[{"value":"0302-9743","type":"print"},{"value":"1611-3349","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025]]},"assertion":[{"value":"20 June 2025","order":1,"name":"first_online","label":"First Online","group":{"name":"ChapterHistory","label":"Chapter History"}},{"value":"PAKDD","order":1,"name":"conference_acronym","label":"Conference Acronym","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Pacific-Asia Conference on Knowledge Discovery and Data Mining","order":2,"name":"conference_name","label":"Conference Name","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Sydney, NSW","order":3,"name":"conference_city","label":"Conference City","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Australia","order":4,"name":"conference_country","label":"Conference Country","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"2025","order":5,"name":"conference_year","label":"Conference Year","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"10 June 2025","order":7,"name":"conference_start_date","label":"Conference Start Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"13 June 2025","order":8,"name":"conference_end_date","label":"Conference End Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"29","order":9,"name":"conference_number","label":"Conference Number","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"pakdd2025","order":10,"name":"conference_id","label":"Conference ID","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"https:\/\/pakdd2025.org\/","order":11,"name":"conference_url","label":"Conference URL","group":{"name":"ConferenceInfo","label":"Conference Information"}}]}}