{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,6,19]],"date-time":"2025-06-19T05:06:29Z","timestamp":1750309589846,"version":"3.41.0"},"publisher-location":"New York, NY, USA","reference-count":11,"publisher":"ACM","license":[{"start":{"date-parts":[[2024,10,17]],"date-time":"2024-10-17T00:00:00Z","timestamp":1729123200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2024,10,17]]},"DOI":"10.1145\/3723178.3723235","type":"proceedings-article","created":{"date-parts":[[2025,6,6]],"date-time":"2025-06-06T07:16:47Z","timestamp":1749194207000},"page":"429-434","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":0,"title":["Suggesting Bangla Words using Masked Language Model"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0009-0008-2203-4179","authenticated-orcid":false,"given":"Dhiman","family":"Bose","sequence":"first","affiliation":[{"name":"Khulna University of Engineering &amp; Technology, Khulna, Khulna, Bangladesh"}]},{"ORCID":"https:\/\/orcid.org\/0009-0001-7094-757X","authenticated-orcid":false,"given":"Md. Shahidul","family":"Salim","sequence":"additional","affiliation":[{"name":"Khulna University of Engineering &amp; Technology, Khulna, Bangladesh"}]}],"member":"320","published-online":{"date-parts":[[2025,6,6]]},"reference":[{"doi-asserted-by":"crossref","unstructured":"Abhik Bhattacharjee Tahmid Hasan Wasi\u00a0Uddin Ahmad Kazi Samin Md\u00a0Saiful Islam Anindya Iqbal M.\u00a0Sohel Rahman and Rifat Shahriyar. 2022. BanglaBERT: Language Model Pretraining and Benchmarks for Low-Resource Language Understanding Evaluation in Bangla. 
arxiv:https:\/\/arXiv.org\/abs\/2101.00204\u00a0[cs.CL] https:\/\/arxiv.org\/abs\/2101.00204","key":"e_1_3_3_1_2_2","DOI":"10.18653\/v1\/2022.findings-naacl.98"},{"unstructured":"Kevin Clark Minh-Thang Luong Quoc\u00a0V. Le and Christopher\u00a0D. Manning. 2020. ELECTRA: Pre-training Text Encoders as Discriminators Rather Than Generators. arxiv:https:\/\/arXiv.org\/abs\/2003.10555\u00a0[cs.CL] https:\/\/arxiv.org\/abs\/2003.10555","key":"e_1_3_3_1_3_2"},{"doi-asserted-by":"publisher","key":"e_1_3_3_1_4_2","DOI":"10.18653\/v1\/2020.findings-emnlp.58"},{"doi-asserted-by":"publisher","unstructured":"Yiming Cui Wanxiang Che Ting Liu Bing Qin and Ziqing Yang. 2021. Pre-Training With Whole Word Masking for Chinese BERT. IEEE\/ACM Transactions on Audio Speech and Language Processing 29 (2021) 3504\u20133514. 10.1109\/TASLP.2021.3124365","key":"e_1_3_3_1_5_2","DOI":"10.1109\/TASLP.2021.3124365"},{"doi-asserted-by":"publisher","key":"e_1_3_3_1_6_2","DOI":"10.18653\/v1\/N19-1423"},{"unstructured":"Jacob Devlin Ming-Wei Chang Kenton Lee and Kristina Toutanova. 2019. BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding. arxiv:https:\/\/arXiv.org\/abs\/1810.04805\u00a0[cs.CL] https:\/\/arxiv.org\/abs\/1810.04805","key":"e_1_3_3_1_7_2"},{"unstructured":"Mandar Joshi Danqi Chen Yinhan Liu Daniel\u00a0S. Weld Luke Zettlemoyer and Omer Levy. 2020. SpanBERT: Improving Pre-training by Representing and Predicting Spans. arxiv:https:\/\/arXiv.org\/abs\/1907.10529\u00a0[cs.CL] https:\/\/arxiv.org\/abs\/1907.10529","key":"e_1_3_3_1_8_2"},{"unstructured":"Yinhan Liu Myle Ott Naman Goyal Jingfei Du Mandar Joshi Danqi Chen Omer Levy Mike Lewis Luke Zettlemoyer and Veselin Stoyanov. 2019. RoBERTa: A Robustly Optimized BERT Pretraining Approach. arxiv:https:\/\/arXiv.org\/abs\/1907.11692\u00a0[cs.CL] https:\/\/arxiv.org\/abs\/1907.11692","key":"e_1_3_3_1_9_2"},{"unstructured":"Debora Nozza Federico Bianchi and Dirk Hovy. 2020. What the [MASK]? 
Making Sense of Language-Specific BERT Models. arxiv:https:\/\/arXiv.org\/abs\/2003.02912\u00a0[cs.CL] https:\/\/arxiv.org\/abs\/2003.02912","key":"e_1_3_3_1_10_2"},{"doi-asserted-by":"publisher","key":"e_1_3_3_1_11_2","DOI":"10.1007\/978-981-99-8937-911"},{"unstructured":"Yu Sun Shuohuan Wang Yukun Li Shikun Feng Xuyi Chen Han Zhang Xin Tian Danxiang Zhu Hao Tian and Hua Wu. 2019. ERNIE: Enhanced Representation through Knowledge Integration. arxiv:https:\/\/arXiv.org\/abs\/1904.09223\u00a0[cs.CL] https:\/\/arxiv.org\/abs\/1904.09223","key":"e_1_3_3_1_12_2"}],"event":{"acronym":"ICCA 2024","name":"ICCA 2024: 3rd International Conference on Computing Advancements","location":"Dhaka Bangladesh"},"container-title":["Proceedings of the 3rd International Conference on Computing Advancements"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3723178.3723235","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3723178.3723235","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,19]],"date-time":"2025-06-19T01:56:47Z","timestamp":1750298207000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3723178.3723235"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,10,17]]},"references-count":11,"alternative-id":["10.1145\/3723178.3723235","10.1145\/3723178"],"URL":"https:\/\/doi.org\/10.1145\/3723178.3723235","relation":{},"subject":[],"published":{"date-parts":[[2024,10,17]]},"assertion":[{"value":"2025-06-06","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}