{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,11,5]],"date-time":"2025-11-05T21:20:06Z","timestamp":1762377606385,"version":"3.44.0"},"publisher-location":"New York, NY, USA","reference-count":14,"publisher":"ACM","license":[{"start":{"date-parts":[[2024,4,24]],"date-time":"2024-04-24T00:00:00Z","timestamp":1713916800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"funder":[{"DOI":"10.13039\/501100006374","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["Grant No. 62202128"],"award-info":[{"award-number":["Grant No. 62202128"]}],"id":[{"id":"10.13039\/501100006374","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100006374","name":"Hainan University","doi-asserted-by":"publisher","award":["Grant No. KYQD(ZR)23125"],"award-info":[{"award-number":["Grant No. KYQD(ZR)23125"]}],"id":[{"id":"10.13039\/501100006374","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2024,4,24]]},"DOI":"10.1145\/3665065.3665081","type":"proceedings-article","created":{"date-parts":[[2024,8,3]],"date-time":"2024-08-03T12:21:46Z","timestamp":1722687706000},"page":"100-104","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":1,"title":["BiCalBERT: An Efficient Transformer-based Model for Chinese Question Answering"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0009-0002-0970-7937","authenticated-orcid":false,"given":"Yanbo","family":"Han","sequence":"first","affiliation":[{"name":"School of Computer Science and Technology, Hainan University, 
China"}]},{"ORCID":"https:\/\/orcid.org\/0009-0008-4129-7281","authenticated-orcid":false,"given":"Buchao","family":"Zhan","sequence":"additional","affiliation":[{"name":"School of Computer Science and Technology, Hainan University, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-2511-5843","authenticated-orcid":false,"given":"Bin","family":"Zhang","sequence":"additional","affiliation":[{"name":"Department of Computer Science, City University of Hong Kong, China and School of Remote Sensing and Information Engineering, Wuhan University, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-4165-2123","authenticated-orcid":false,"given":"Chao","family":"Zhao","sequence":"additional","affiliation":[{"name":"Department of Computer Science, City University of Hong Kong, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0369-4979","authenticated-orcid":false,"given":"Shankai","family":"Yan","sequence":"additional","affiliation":[{"name":"School of Computer Science and Technology, Hainan University, China"}]}],"member":"320","published-online":{"date-parts":[[2024,8,3]]},"reference":[{"key":"e_1_3_2_1_1_1","doi-asserted-by":"crossref","unstructured":"Hirschman Lynette and Robert Gaizauskas. \"Natural language question answering: the view from here.\" natural language engineering 7.4 (2001): 275-300.","DOI":"10.1017\/S1351324901002807"},{"key":"e_1_3_2_1_2_1","volume-title":"1877-1901","author":"Brown Tom","year":"2020","unstructured":"Brown, Tom, \"Language models are few-shot learners.\" Advances in neural information processing systems 33 (2020): 1877-1901."},{"key":"e_1_3_2_1_3_1","volume-title":"IEEE","author":"Siami-Namini Sima","year":"2019","unstructured":"Siami-Namini, Sima, Neda Tavakoli, and Akbar Siami Namin. \"The performance of LSTM and BiLSTM in forecasting time series.\" 2019 IEEE International conference on big data (Big Data). 
IEEE, 2019."},{"key":"e_1_3_2_1_4_1","volume-title":"A lite bert for self-supervised learning of language representations.\" arXiv preprint arXiv:1909.11942","author":"Lan Zhenzhong","year":"2019","unstructured":"Lan, Zhenzhong, \"Albert: A lite bert for self-supervised learning of language representations.\" arXiv preprint arXiv:1909.11942 (2019)."},{"key":"e_1_3_2_1_5_1","doi-asserted-by":"publisher","unstructured":"OpenAI(2023).GPT-4 Technical Report. The Philosophy of Deep Learning(2023) 1-100. https:\/\/doi.org\/10.48550\/arXiv.2303.08774","DOI":"10.48550\/arXiv.2303.08774"},{"key":"e_1_3_2_1_6_1","doi-asserted-by":"publisher","DOI":"10.1162\/153244303322533223"},{"key":"e_1_3_2_1_7_1","unstructured":"Bengio Yoshua R\u00e9jean Ducharme and Pascal Vincent. \"A neural probabilistic language model.\" Advances in neural information processing systems 13 (2000)."},{"volume-title":"In\u00a0Proceedings of the 2014 conference on empirical methods in natural language processing (EMNLP)\u00a0(pp. 1532-1543)","author":"Mikolov Tomas","key":"e_1_3_2_1_8_1","unstructured":"Mikolov, Tomas, \"Efficient estimation of word representations in vector space.\" arXiv preprint arXiv:1301.3781 (2013).Pennington, J., Socher, R., & Manning, C. D. (2014, October). Glove: Global vectors for word representation. In\u00a0Proceedings of the 2014 conference on empirical methods in natural language processing (EMNLP)\u00a0(pp. 1532-1543)."},{"key":"e_1_3_2_1_9_1","volume-title":"\"Glove: Global vectors for word representation.\" Proceedings of the 2014 conference on empirical methods in natural language processing (EMNLP)","author":"Pennington Jeffrey","year":"2014","unstructured":"Pennington, Jeffrey, Richard Socher, and Christopher D. Manning. \"Glove: Global vectors for word representation.\" Proceedings of the 2014 conference on empirical methods in natural language processing (EMNLP). 
2014."},{"key":"e_1_3_2_1_10_1","volume-title":"P., Liu, X., Gao, J.","author":"Khadija K\u00fcr\u015fat","year":"2023","unstructured":"MOHAMAD, Khadija, and K\u00fcr\u015fat Mustafa KARAO\u011eLAN. \"Enhancing Deep Learning-Based Sentiment Analysis Using Static and Contextual Language Models.\" Bitlis Eren \u00dcniversitesi Fen Bilimleri Dergisi 12.3 (2023): 712-724.He, P., Liu, X., Gao, J., & Chen, W. (2020). Deberta: Decoding-enhanced bert with disentangled attention.\u00a0arXiv preprint arXiv:2006.03654."},{"key":"e_1_3_2_1_11_1","volume-title":"Decoding-enhanced bert with disentangled attention.\" arXiv preprint arXiv:2006.03654","author":"He Pengcheng","year":"2020","unstructured":"He, Pengcheng, \"Deberta: Decoding-enhanced bert with disentangled attention.\" arXiv preprint arXiv:2006.03654 (2020)."},{"key":"e_1_3_2_1_12_1","volume-title":"A robustly optimized bert pretraining approach.\" arXiv preprint arXiv:1907.11692","author":"Liu Yinhan","year":"2019","unstructured":"Liu, Yinhan, \"Roberta: A robustly optimized bert pretraining approach.\" arXiv preprint arXiv:1907.11692 (2019)."},{"key":"e_1_3_2_1_13_1","unstructured":"Bengio Yoshua R\u00e9jean Ducharme and Pascal Vincent. 
\"A neural probabilistic language model.\" Advances in neural information processing systems 13 (2000)."},{"key":"e_1_3_2_1_14_1","unstructured":"Vaswani Ashish \"Attention is all you need.\" Advances in neural information processing systems 30 (2017)."}],"event":{"name":"ISMSI 2024: 2024 8th International Conference on Intelligent Systems, Metaheuristics & Swarm Intelligence","acronym":"ISMSI 2024","location":"Singapore Singapore"},"container-title":["2024 8th International Conference on Intelligent Systems Metaheuristics &amp; Swarm Intelligence (ISMSI)"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3665065.3665081","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3665065.3665081","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,8,26]],"date-time":"2025-08-26T19:21:41Z","timestamp":1756236101000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3665065.3665081"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,4,24]]},"references-count":14,"alternative-id":["10.1145\/3665065.3665081","10.1145\/3665065"],"URL":"https:\/\/doi.org\/10.1145\/3665065.3665081","relation":{},"subject":[],"published":{"date-parts":[[2024,4,24]]},"assertion":[{"value":"2024-08-03","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}