{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,2,21]],"date-time":"2025-02-21T21:14:01Z","timestamp":1740172441361,"version":"3.37.3"},"reference-count":56,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"name":"National Key Research and Development Program of China","award":["2022YFF0902701"],"award-info":[{"award-number":["2022YFF0902701"]}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["U21A20468","61921003","61972043","U22A201339","62202065","62302053"],"award-info":[{"award-number":["U21A20468","61921003","61972043","U22A201339","62202065","62302053"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Zhejiang Lab","award":["2021PD0AB02"],"award-info":[{"award-number":["2021PD0AB02"]}]},{"name":"Key R&amp;D Program of Zhejiang","award":["2022C04006"],"award-info":[{"award-number":["2022C04006"]}]},{"DOI":"10.13039\/501100012226","name":"Fundamental Research Funds for the Central Universities","doi-asserted-by":"publisher","award":["2020XD-A07-1"],"award-info":[{"award-number":["2020XD-A07-1"]}],"id":[{"id":"10.13039\/501100012226","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE\/ACM Trans. Audio Speech Lang. 
Process."],"published-print":{"date-parts":[[2024]]},"DOI":"10.1109\/taslp.2023.3340610","type":"journal-article","created":{"date-parts":[[2023,12,7]],"date-time":"2023-12-07T19:46:16Z","timestamp":1701978376000},"page":"853-867","source":"Crossref","is-referenced-by-count":3,"title":["FluGCF: A Fluent Dialogue Generation Model With Coherent Concept Entity Flow"],"prefix":"10.1109","volume":"32","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-2151-5420","authenticated-orcid":false,"given":"Yaru","family":"Zhao","sequence":"first","affiliation":[{"name":"State Key Laboratory of Networking and Switching Technology, Beijing University of Posts and Telecommunications, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-2160-2839","authenticated-orcid":false,"given":"Bo","family":"Cheng","sequence":"additional","affiliation":[{"name":"State Key Laboratory of Networking and Switching Technology, Beijing University of Posts and Telecommunications, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-4051-0200","authenticated-orcid":false,"given":"Yakun","family":"Huang","sequence":"additional","affiliation":[{"name":"State Key Laboratory of Networking and Switching Technology, Beijing University of Posts and Telecommunications, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-1319-1224","authenticated-orcid":false,"given":"Zhiguo","family":"Wan","sequence":"additional","affiliation":[{"name":"Zhejiang Lab, Hangzhou, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.3115\/v1\/P15-1152"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1565"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/W15-4616"},{"key":"ref4","first-page":"3370","article-title":"A neural network approach for knowledge-driven response generation","volume-title":"Proc. 26th Int. Conf. Comput. Linguistics: Tech. Papers","author":"Vougiouklis","year":"2016"},{"key":"ref5","first-page":"1815","article-title":"Generating informative and diverse conversational responses via adversarial information maximization","volume-title":"Proc. 32nd Int. Conf. Neural Inf. Process. Syst.","author":"Zhang","year":"2018"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1598"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2018\/643"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-main.184"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/IJCNN.2017.7966297"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v32i1.11923"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1145\/3512467"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v31i1.10981"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/N18-5020"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2021\/593"},{"issue":"8","key":"ref15","article-title":"Language models are unsupervised multitask learners","volume":"1","author":"Radford","year":"2019","journal-title":"OpenAI Blog"},{"key":"ref16","first-page":"1","article-title":"Exploring the limits of transfer learning with a unified text-to-text transformer","volume":"21","author":"Raffel","year":"2020","journal-title":"J. Mach. Learn. 
Res."},{"article-title":"Scaling instruction-finetuned language models","year":"2022","author":"Chung","key":"ref17"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.3115\/v1\/d14-1179"},{"key":"ref19","first-page":"2787","article-title":"Translating embeddings for modeling multi-relational data","volume-title":"Proc. 26th Int. Conf. Neural Inf. Process. Syst.","author":"Bordes","year":"2013"},{"key":"ref20","first-page":"1","article-title":"Neural machine translation by jointly learning to align and translate","volume-title":"Proc. 3rd Int. Conf. Learn. Representations","author":"Bahdanau","year":"2015"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v31i1.11164"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v34i05.6298"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1145\/219717.219748"},{"key":"ref24","first-page":"3104","article-title":"Sequence to sequence learning with neural networks","volume-title":"Proc. 27th Int. Conf. Neural Inf. Process. Syst.","author":"Sutskever","year":"2014"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v32i1.11977"},{"article-title":"Flexible end-to-end dialogue system for knowledge grounded conversation","year":"2017","author":"Zhu","key":"ref26"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-main.515"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v31i1.10983"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.3115\/1073083.1073135"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.3115\/1289189.1289273"},{"key":"ref31","first-page":"74","article-title":"ROUGE: A package for automatic evaluation of summaries","volume-title":"Proc. Workshop Text Summarization Branches Out","author":"Lin","year":"2004"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.3115\/1626355.1626389"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-main.333"},{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/N16-1014"},{"key":"ref35","first-page":"3349","article-title":"Sequence to backward and forward sequences: A content-introducing approach to generative short-text conversation","volume-title":"Proc. 26th Int. Conf. Comput. Linguistics: Tech. Papers","author":"Mou","year":"2016"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-00671-6_37"},{"key":"ref37","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.emnlp-main.742"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2020\/545"},{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D19-1187"},{"article-title":"How close is ChatGPT to human experts? 
Comparison corpus, evaluation, and detection","year":"2023","author":"Guo","key":"ref40"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-acl.275"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-acl.29"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1145\/3366423.3380193"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.3115\/v1\/N15-1020"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P16-1154"},{"key":"ref46","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1538"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D16-1127"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P18-1138"},{"key":"ref49","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.emnlp-main.54"},{"key":"ref50","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i7.16796"},{"key":"ref51","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.emnlp-main.147"},{"key":"ref52","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2021.emnlp-main.353"},{"key":"ref53","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.findings-naacl.195"},{"key":"ref54","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-demos.30"},{"article-title":"Towards a human-like open-domain chatbot","year":"2020","author":"Adiwardana","key":"ref55"},{"key":"ref56","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.acl-long.579"}],"container-title":["IEEE\/ACM Transactions on Audio, Speech, and Language Processing"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/6570655\/10304349\/10347393.pdf?arnumber=10347393","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,1,12]],"date-time":"2024-01-12T03:37:58Z","timestamp":1705030678000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10347393\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024]]},"references-count":56,"URL":"https:\/\/doi.org\/10.1109\/taslp.2023.3340610","relation":{},"ISSN":["2329-9290","2329-9304"],"issn-type":[{"type":"print","value":"2329-9290"},{"type":"electronic","value":"2329-9304"}],"subject":[],"published":{"date-parts":[[2024]]}}}
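
The record above is a Crossref REST API "work" object for DOI 10.1109/taslp.2023.3340610. As a minimal sketch of how such a record can be retrieved and its main fields read, the following assumes the Python `requests` package, network access, and a placeholder `mailto` contact address for Crossref's polite pool; the field names are taken directly from the record above.

# Minimal sketch: fetch and read the Crossref work record shown above.
# Assumes `requests` is installed; the mailto address is a placeholder.
import requests

DOI = "10.1109/taslp.2023.3340610"
resp = requests.get(
    f"https://api.crossref.org/works/{DOI}",
    params={"mailto": "you@example.org"},  # polite-pool contact (placeholder)
    timeout=30,
)
resp.raise_for_status()
msg = resp.json()["message"]  # same structure as the "message" object above

print(msg["title"][0])
print(msg["container-title"][0], "vol.", msg["volume"], "pp.", msg["page"],
      msg["published"]["date-parts"][0][0])
print(", ".join(f'{a["given"]} {a["family"]}' for a in msg["author"]))
print("references:", msg["references-count"], "| cited by:", msg["is-referenced-by-count"])

# Reference entries carry either a DOI or only bibliographic fields, so guard the lookup.
for ref in msg["reference"][:5]:
    print(ref["key"], ref.get("DOI") or ref.get("article-title", ""))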