{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,2,1]],"date-time":"2026-02-01T04:24:41Z","timestamp":1769919881948,"version":"3.49.0"},"publisher-location":"New York, NY, USA","reference-count":16,"publisher":"ACM","license":[{"start":{"date-parts":[[2021,4,15]],"date-time":"2021-04-15T00:00:00Z","timestamp":1618444800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2021,4,15]]},"DOI":"10.1145\/3409334.3452074","type":"proceedings-article","created":{"date-parts":[[2021,5,10]],"date-time":"2021-05-10T20:49:49Z","timestamp":1620679789000},"page":"234-238","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":9,"title":["Sentiment analysis on COVID tweets using COVID-Twitter-BERT with auxiliary sentence approach"],"prefix":"10.1145","author":[{"given":"Hung Yeh","family":"Lin","sequence":"first","affiliation":[{"name":"San Jose State University"}]},{"given":"Teng-Sheng","family":"Moh","sequence":"additional","affiliation":[{"name":"San Jose State University"}]}],"member":"320","published-online":{"date-parts":[[2021,5,10]]},"reference":[{"key":"e_1_3_2_1_1_1","volume-title":"BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding. CoRR abs\/1810.04805","author":"Devlin J.","year":"2018","unstructured":"J. Devlin, M. Chang, K. Lee, and K. Toutanova. 2018. BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding. CoRR abs\/1810.04805 (2018). arXiv:1810.04805 http:\/\/arxiv.org\/abs\/1810.04805."},{"key":"e_1_3_2_1_2_1","unstructured":"S. Kumar. 2020. COVID-19 India Sentiments on COVID-19 and Lockdown. https:\/\/www.kaggle.com\/surajkum1198\/twitterdata."},{"key":"e_1_3_2_1_3_1","doi-asserted-by":"publisher","unstructured":"R. Lamsal. 2020. Coronavirus (COVID-19) Tweets Dataset. 10.21227\/781w-ef42","DOI":"10.21227\/781w-ef42"},{"key":"e_1_3_2_1_4_1","unstructured":"I. Loshchilov and F. Hutter. 2019. Decoupled Weight Decay Regularization. arXiv:1711.05101."},{"key":"e_1_3_2_1_5_1","unstructured":"Tomas Mikolov Kai Chen Greg Corrado and Jeffrey Dean. 2013. Efficient Estimation of Word Representations in Vector Space. arXiv:1301.3781."},{"key":"e_1_3_2_1_6_1","unstructured":"M. M\u00fcller M. Salath\u00e9 and P. E. Kummervold. 2020. COVID-Twitter-BERT: A Natural Language Processing Model to Analyse COVID-19 Content on Twitter. arXiv:2005.07503."},{"key":"e_1_3_2_1_7_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/S16-1001"},{"key":"e_1_3_2_1_8_1","doi-asserted-by":"crossref","unstructured":"M. Peters M. Neumann M. Iyyer M. Gardner C. Clark K. Lee and L. Zettlemoyer. 2018. Deep Contextualized Word Representations. arXiv:1802.05365.","DOI":"10.18653\/v1\/N18-1202"},{"key":"e_1_3_2_1_9_1","unstructured":"T. C. R. 2019. Simple Transformers. https:\/\/github.com\/ThilinaRajapakse\/simpletransformers."},{"key":"e_1_3_2_1_10_1","unstructured":"V. Sanh L. Debut J. Chaumond and T. Wolf. 2020. DistilBERT a Distilled Version of BERT: Smaller Faster Cheaper and Lighter. arXiv:1910.01108."},{"key":"e_1_3_2_1_11_1","unstructured":"C. Sun L. Huang and X. Qiu. 2019. Utilizing BERT for Aspect-based Sentiment Analysis via Constructing Auxiliary Sentence. CoRR abs\/1903.09588 (2019). 
arXiv:1903.09588 http:\/\/arxiv.org\/abs\/1903.09588."},{"key":"e_1_3_2_1_12_1","unstructured":"I. Sutskever O. Vinyals and Q. Le. 2014. Sequence to Sequence Learning with Neural Networks. arXiv:1409.3215."},{"key":"e_1_3_2_1_13_1","unstructured":"A. Vaswani N. Shazeer N. Parmar J. Uszkoreit L. Jones A. N. Gomez L. Kaiser and I. Polosukhin. 2017. Attention is All You Need. CoRR abs\/1706.03762 (2017). arXiv:1706.03762 http:\/\/arxiv.org\/abs\/1706.03762."},{"key":"e_1_3_2_1_14_1","volume":"201","author":"Wolf T.","unstructured":"T. Wolf, L. Debut, V. Sanh, J. Chaumond, C. Delangue, A. Moi, P. Cistac, T. Rault, R. Louf, M. Funtowicz, and J. Brew. 2019. HuggingFace's Transformers: State-of-the-art Natural Language Processing. CoRR abs\/1910.03771 (2019). arXiv:1910.03771. http:\/\/arxiv.org\/abs\/1910.03771.","journal-title":"J. Brew."},{"key":"e_1_3_2_1_15_1","unstructured":"Worldometers.info. [n.d.]. COVID-19 Coronavirus Pandemic. https:\/\/www.worldometers.info\/coronavirus\/?utm_campaign=homeAdvegas1?."},{"key":"e_1_3_2_1_16_1","unstructured":"Z. Yang Z. Dai Y. Yang J. Carbonell R. Salakhutdinov and Q. V. Le. 2020. XLNet: Generalized Autoregressive Pretraining for Language Understanding. arXiv:1906.08237."}],"event":{"name":"ACM SE '21: 2021 ACM Southeast Conference","location":"Virtual Event USA","acronym":"ACM SE '21","sponsor":["ACM Association for Computing Machinery"]},"container-title":["Proceedings of the 2021 ACM Southeast Conference"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3409334.3452074","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3409334.3452074","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,17]],"date-time":"2025-06-17T22:38:40Z","timestamp":1750199920000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3409334.3452074"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021,4,15]]},"references-count":16,"alternative-id":["10.1145\/3409334.3452074","10.1145\/3409334"],"URL":"https:\/\/doi.org\/10.1145\/3409334.3452074","relation":{},"subject":[],"published":{"date-parts":[[2021,4,15]]},"assertion":[{"value":"2021-05-10","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}
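
For reference, this record is the JSON body that Crossref's public REST API returns for the paper's DOI. Below is a minimal sketch of fetching and parsing such a record in Python; it assumes network access and the third-party requests package, and the script name and contact address in the User-Agent header are placeholders (Crossref asks callers to supply a contact to use its "polite" service pool).

# Fetch the Crossref work record for a DOI and print a one-line citation.
# The User-Agent values below are placeholders, not real contact details.
import requests

DOI = "10.1145/3409334.3452074"

resp = requests.get(
    f"https://api.crossref.org/works/{DOI}",
    headers={"User-Agent": "example-fetcher/0.1 (mailto:you@example.org)"},
    timeout=30,
)
resp.raise_for_status()
work = resp.json()["message"]  # the "message" object shown in the record above

# Pull a few fields out of the record; all of them appear in the JSON above.
title = work["title"][0]
authors = ", ".join(f"{a['given']} {a['family']}" for a in work["author"])
print(f"{authors}. {title}. {work['container-title'][0]}, pp. {work['page']}.")

Run against the DOI above, this should print the authors, title, proceedings name, and page range drawn from the fields shown in the record.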