{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,6,19]],"date-time":"2025-06-19T05:06:30Z","timestamp":1750309590328,"version":"3.41.0"},"publisher-location":"New York, NY, USA","reference-count":27,"publisher":"ACM","license":[{"start":{"date-parts":[[2024,10,17]],"date-time":"2024-10-17T00:00:00Z","timestamp":1729123200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2024,10,17]]},"DOI":"10.1145\/3723178.3723231","type":"proceedings-article","created":{"date-parts":[[2025,6,6]],"date-time":"2025-06-06T07:16:47Z","timestamp":1749194207000},"page":"399-405","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":0,"title":["A Comparative Study of ParsBERT and mBERT in Emotion Recognition for Dari-Farsi Text with Explainable AI"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0009-0008-2993-6458","authenticated-orcid":false,"given":"Malika","family":"Muradi","sequence":"first","affiliation":[{"name":"Brac University, Dhaka, Bangladesh"}]},{"ORCID":"https:\/\/orcid.org\/0009-0003-7392-1812","authenticated-orcid":false,"given":"Basit","family":"Hussain","sequence":"additional","affiliation":[{"name":"Brac University, Dhaka, Bangladesh"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-4641-508X","authenticated-orcid":false,"given":"Ehsanur Rahman","family":"Rhythm","sequence":"additional","affiliation":[{"name":"BRAC University, Dhaka, Bangladesh"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0198-3734","authenticated-orcid":false,"given":"Annajiat Alim","family":"Rasel","sequence":"additional","affiliation":[{"name":"BRAC University, Dhaka, Bangladesh"}]}],"member":"320","published-online":{"date-parts":[[2025,6,6]]},"reference":[{"key":"e_1_3_3_1_2_2","unstructured":"Parham Abed\u00a0Azad and Hamid Beigy. 2024. Multi-BERT: Leveraging Adapters and Prompt Tuning for Low-Resource Multi-Domain Adaptation. arXiv e-prints NA NA (2024) arXiv\u20132404."},{"key":"e_1_3_3_1_3_2","unstructured":"Farid Ariai Maryam Tayefeh\u00a0Mahmoudi and Ali Moeini. 2024. Enhancing Aspect-based Sentiment Analysis with ParsBERT in Persian Language. Journal of AI and Data Mining 12 1 (2024) 1\u201314."},{"key":"e_1_3_3_1_4_2","unstructured":"Abhik Bhattacharjee Tahmid Hasan Wasi\u00a0Uddin Ahmad Kazi Samin Md\u00a0Saiful Islam Anindya Iqbal M\u00a0Sohel Rahman and Rifat Shahriyar. 2021. BanglaBERT: Language model pretraining and benchmarks for low-resource language understanding evaluation in Bangla. arXiv preprint arXiv:https:\/\/arXiv.org\/abs\/2101.00204 1 (2021) 1."},{"key":"e_1_3_3_1_5_2","doi-asserted-by":"publisher","DOI":"10.1109\/CSICC58665.2023.10105414"},{"key":"e_1_3_3_1_6_2","doi-asserted-by":"publisher","DOI":"10.1109\/ICWR57742.2023.10139063"},{"key":"e_1_3_3_1_7_2","unstructured":"Jacob Devlin. 2018. Bert: Pre-training of deep bidirectional transformers for language understanding. arXiv preprint arXiv:https:\/\/arXiv.org\/abs\/1810.04805 1 1 (2018)."},{"key":"e_1_3_3_1_8_2","doi-asserted-by":"crossref","unstructured":"Mehrdad Farahani Mohammad Gharachorloo Marzieh Farahani and Mohammad Manthouri. 2021. Parsbert: Transformer-based model for persian language understanding. 
Neural Processing Letters 53 (2021) 3831\u20133847.","DOI":"10.1007\/s11063-021-10528-4"},{"key":"e_1_3_3_1_9_2","doi-asserted-by":"crossref","unstructured":"Mehrdad Farahani Mohammad Gharachorloo Marzieh Farahani and Mohammad Manthouri. 2021. Parsbert: Transformer-based model for persian language understanding. Neural Processing Letters 53 (2021) 3831\u20133847.","DOI":"10.1007\/s11063-021-10528-4"},{"key":"e_1_3_3_1_10_2","doi-asserted-by":"crossref","unstructured":"Mehrdad Farahani Mohammad Gharachorloo Marzieh Farahani and Mohammad Manthouri. 2021. Parsbert: Transformer-based model for persian language understanding. Neural Processing Letters 53 (2021) 3831\u20133847.","DOI":"10.1007\/s11063-021-10528-4"},{"key":"e_1_3_3_1_11_2","doi-asserted-by":"publisher","DOI":"10.1109\/CSICC52343.2021.9420563"},{"key":"e_1_3_3_1_12_2","unstructured":"Pengcheng He Xiaodong Liu Jianfeng Gao and Weizhu Chen. 2020. Deberta: Decoding-enhanced bert with disentangled attention. arXiv preprint arXiv:https:\/\/arXiv.org\/abs\/2006.03654 1 (2020) 1."},{"key":"e_1_3_3_1_13_2","first-page":"257","volume-title":"Proceedings of the 3rd Annual Meeting of the Special Interest Group on Under-resourced Languages@ LREC-COLING 2024","author":"Hussiny Mohammad\u00a0Ali","year":"2024","unstructured":"Mohammad\u00a0Ali Hussiny, Mohammad\u00a0Arif Payenda, and Lilja \u00d8vrelid. 2024. PersianEmo: Enhancing Farsi-Dari Emotion Analysis with a Hybrid Transformer and Recurrent Neural Network Model. In Proceedings of the 3rd Annual Meeting of the Special Interest Group on Under-resourced Languages@ LREC-COLING 2024. Proceedings of the 3rd Annual Meeting of the Special Interest Group on Under-resourced Languages@ LREC-COLING 2024, Proceedings of the 3rd Annual Meeting of the Special Interest Group on Under-resourced Languages@ LREC-COLING 2024, 257\u2013263."},{"key":"e_1_3_3_1_14_2","first-page":"257","volume-title":"Proceedings of the 3rd Annual Meeting of the Special Interest Group on Under-resourced Languages@ LREC-COLING 2024","author":"Hussiny Mohammad\u00a0Ali","year":"2024","unstructured":"Mohammad\u00a0Ali Hussiny, Mohammad\u00a0Arif Payenda, and Lilja \u00d8vrelid. 2024. PersianEmo: Enhancing Farsi-Dari Emotion Analysis with a Hybrid Transformer and Recurrent Neural Network Model. In Proceedings of the 3rd Annual Meeting of the Special Interest Group on Under-resourced Languages@ LREC-COLING 2024. ELRA and ICCL, Torino, Italia, 257\u2013263."},{"key":"e_1_3_3_1_15_2","unstructured":"imbalanced-learn. 2023. Over-sampling \u2014 Version 0.9.1. https:\/\/imbalanced-learn.org\/stable\/over_sampling.html Accessed: 2024-08-01."},{"key":"e_1_3_3_1_16_2","doi-asserted-by":"crossref","unstructured":"Mandar Joshi Danqi Chen Yinhan Liu Daniel\u00a0S Weld Luke Zettlemoyer and Omer Levy. 2020. Spanbert: Improving pre-training by representing and predicting spans. Transactions of the association for computational linguistics 8 (2020) 64\u201377.","DOI":"10.1162\/tacl_a_00300"},{"key":"e_1_3_3_1_17_2","unstructured":"Sadullah Karimi. 2022. Survey of Afghan (Dari) Language NLP for Building Afghan NLIDB System. Ph.D. Dissertation. Brac University."},{"key":"e_1_3_3_1_18_2","first-page":"2","volume-title":"Proceedings of naacL-HLT","volume":"1","author":"Kenton Jacob Devlin Ming-Wei\u00a0Chang","year":"2019","unstructured":"Jacob Devlin Ming-Wei\u00a0Chang Kenton and Lee\u00a0Kristina Toutanova. 2019. Bert: Pre-training of deep bidirectional transformers for language understanding. In Proceedings of naacL-HLT , Vol.\u00a01. 
Minneapolis, Minnesota, Proceedings of naacL-HLT, 2."},{"key":"e_1_3_3_1_19_2","unstructured":"Z Lan. 2019. Albert: A lite bert for self-supervised learning of language representations. arXiv preprint arXiv:https:\/\/arXiv.org\/abs\/1909.11942 1 1 (2019)."},{"key":"e_1_3_3_1_20_2","unstructured":"Yinhan Liu. 2019. Roberta: A robustly optimized bert pretraining approach. arXiv preprint arXiv:https:\/\/arXiv.org\/abs\/1907.11692 1 1 (2019)."},{"key":"e_1_3_3_1_21_2","doi-asserted-by":"crossref","unstructured":"Ali Mehrban and Pegah Ahadian. 2023. evaluating bert and parsbert for analyzing persian advertisement data. arXiv preprint arXiv:https:\/\/arXiv.org\/abs\/2305.02426 12 2 (2023) 2.","DOI":"10.5121\/ijnlc.2023.12202"},{"key":"e_1_3_3_1_22_2","doi-asserted-by":"crossref","unstructured":"Mahnaz Panahandeh\u00a0Nigjeh and Shirin Ghanbari. 2024. Leveraging ParsBERT for cross-domain polarity sentiment classification of Persian social media comments. Multimedia Tools and Applications 83 4 (2024) 10677\u201310694.","DOI":"10.1007\/s11042-023-16067-5"},{"key":"e_1_3_3_1_23_2","doi-asserted-by":"publisher","DOI":"10.1109\/CSICC52343.2021.9420569"},{"key":"e_1_3_3_1_24_2","unstructured":"V Sanh. 2019. DistilBERT A Distilled Version of BERT: Smaller Faster Cheaper and Lighter. arXiv preprint arXiv:https:\/\/arXiv.org\/abs\/1910.01108 1 1 (2019) NA."},{"key":"e_1_3_3_1_25_2","doi-asserted-by":"publisher","DOI":"10.1109\/AISP61396.2024.10475235"},{"key":"e_1_3_3_1_26_2","doi-asserted-by":"publisher","DOI":"10.1109\/ICWR51868.2021.9443119"},{"key":"e_1_3_3_1_27_2","doi-asserted-by":"crossref","unstructured":"Hadi Veisi Kozhin muhealddin Awlla and Abdulhady\u00a0Abas Abdullah. 2024. KuBERT: Central Kurdish BERT Model and Its Application for Sentiment Analysis. Research Square NA NA (2024) NA.","DOI":"10.21203\/rs.3.rs-4552724\/v1"},{"key":"e_1_3_3_1_28_2","doi-asserted-by":"publisher","DOI":"10.1109\/QICAR61538.2024.10496609"}],"event":{"name":"ICCA 2024: 3rd International Conference on Computing Advancements","acronym":"ICCA 2024","location":"Dhaka Bangladesh"},"container-title":["Proceedings of the 3rd International Conference on Computing Advancements"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3723178.3723231","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3723178.3723231","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,19]],"date-time":"2025-06-19T01:56:47Z","timestamp":1750298207000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3723178.3723231"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,10,17]]},"references-count":27,"alternative-id":["10.1145\/3723178.3723231","10.1145\/3723178"],"URL":"https:\/\/doi.org\/10.1145\/3723178.3723231","relation":{},"subject":[],"published":{"date-parts":[[2024,10,17]]},"assertion":[{"value":"2025-06-06","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}
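For reuse of this metadata, a minimal sketch of how a work record like the one above can be fetched and a few of its fields read. It assumes Python 3 with the requests package installed and access to the public Crossref REST API at api.crossref.org; the DOI is taken from the record itself, while the variable names and printed summary are purely illustrative.

# Fetch and summarize a Crossref work record (sketch, assumes `requests` is installed).
import requests

DOI = "10.1145/3723178.3723231"  # DOI from the record above

# The Crossref REST API wraps the record in an envelope like the one above:
# {"status": "ok", "message-type": "work", ..., "message": {...}}
resp = requests.get(f"https://api.crossref.org/works/{DOI}", timeout=30)
resp.raise_for_status()
work = resp.json()["message"]

# Read a few fields present in the record: title, authors, deposited references.
title = work["title"][0]
authors = ["{} {}".format(a.get("given", ""), a.get("family", "")).strip()
           for a in work.get("author", [])]
refs = work.get("reference", [])

print(title)
print(", ".join(authors))
print("deposited references:", len(refs), "of", work.get("references-count"))

Note that "title" and "container-title" are lists in Crossref records, and the "reference" array may mix structured entries (with "DOI", "volume-title", etc.) and free-text "unstructured" strings, as in the record above.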