{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,2,17]],"date-time":"2026-02-17T12:01:57Z","timestamp":1771329717706,"version":"3.50.1"},"publisher-location":"New York, NY, USA","reference-count":29,"publisher":"ACM","license":[{"start":{"date-parts":[[2024,2,2]],"date-time":"2024-02-02T00:00:00Z","timestamp":1706832000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2024,2,2]]},"DOI":"10.1145\/3651671.3651709","type":"proceedings-article","created":{"date-parts":[[2024,6,7]],"date-time":"2024-06-07T18:55:50Z","timestamp":1717786550000},"page":"536-544","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":1,"title":["BERT-Based Models with Attention Mechanism and Lambda Layer for Biomedical Named Entity Recognition"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0009-0002-1157-6657","authenticated-orcid":false,"given":"Yuning","family":"Shi","sequence":"first","affiliation":[{"name":"Electrical Engineering and Computer Science, Shibaura Institute of Technology, Japan"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-3991-4259","authenticated-orcid":false,"given":"Masaomi","family":"Kimura","sequence":"additional","affiliation":[{"name":"Electrical Engineering and Computer Science, Shibaura Institute of Technology, Japan"}]}],"member":"320","published-online":{"date-parts":[[2024,6,7]]},"reference":[{"key":"e_1_3_2_1_1_1","volume-title":"Bert: Pre-training of deep bidirectional transformers for language understanding. arXiv preprint arXiv:1810.04805.","author":"Devlin Jacob","year":"2018","unstructured":"Jacob Devlin, Ming-Wei Chang, Kenton Lee, and Kristina Toutanova. 2018. Bert: Pre-training of deep bidirectional transformers for language understanding. arXiv preprint arXiv:1810.04805."},{"key":"e_1_3_2_1_2_1","doi-asserted-by":"crossref","unstructured":"Elman Jeffrey L. 1990. Finding structure in time. Cognitive science 14(2) 179-211.","DOI":"10.1016\/0364-0213(90)90002-E"},{"key":"e_1_3_2_1_3_1","doi-asserted-by":"crossref","unstructured":"Hochreiter Sepp and J\u00fcrgen Schmidhuber. 1997. Long short-term memory. Neural computation 9(8) 1735-1780.","DOI":"10.1162\/neco.1997.9.8.1735"},{"key":"e_1_3_2_1_4_1","doi-asserted-by":"crossref","unstructured":"Graves Alex and J\u00fcrgen Schmidhuber. 2005. Framewise phoneme classification with bidirectional LSTM and other neural network architectures. Neural networks 18(5-6) 602-610.","DOI":"10.1016\/j.neunet.2005.06.042"},{"key":"e_1_3_2_1_5_1","doi-asserted-by":"crossref","unstructured":"Emma Strubell Patrick Verga David Belanger and Andrew McCallum. 2017. Fast and accurate entity recognition with iterated dilated convolutions. arXiv preprint arXiv:1702.02098.","DOI":"10.18653\/v1\/D17-1283"},{"key":"e_1_3_2_1_6_1","unstructured":"Huang Zhiheng Wei Xu and Kai Yu. 2015. Bidirectional LSTM-CRF models for sequence tagging. arXiv preprint arXiv:1508.01991."},{"key":"e_1_3_2_1_7_1","doi-asserted-by":"crossref","unstructured":"Zhenjin Dai Xutao Wang Pin Ni Yuming Li Gangmin Li and Xuming Bai. 2019. Named entity recognition using BERT BiLSTM CRF for Chinese electronic health records. In 2019 12th international congress on image and signal processing biomedical engineering and informatics (cisp-bmei) (pp. 1-5). 
IEEE.","DOI":"10.1109\/CISP-BMEI48845.2019.8965823"},{"key":"e_1_3_2_1_8_1","volume-title":"Proceedings of the 6th International Conference on Graphics and Signal Processing (pp. 80-85)","author":"Sun Erhua","year":"2022","unstructured":"Cai, Xiaocheng, Erhua Sun, and Jiali Lei. 2022. Research on application of named entity recognition of electronic medical records based on BERT-IDCNN-CRF model. In Proceedings of the 6th International Conference on Graphics and Signal Processing (pp. 80-85)."},{"key":"e_1_3_2_1_9_1","unstructured":"Vaswani A. 2017. Attention is all you need. Advances in neural information processing systems 30."},{"key":"e_1_3_2_1_10_1","volume-title":"Reformer: The efficient transformer. arXiv preprint arXiv:2001.04451.","author":"Kaiser \u0141ukasz","year":"2020","unstructured":"Kitaev, Nikita, \u0141ukasz Kaiser, and Anselm Levskaya. 2020. Reformer: The efficient transformer. arXiv preprint arXiv:2001.04451."},{"key":"e_1_3_2_1_11_1","volume-title":"Lambdanetworks: Modeling long-range interactions without attention. arXiv preprint arXiv:2102.08602.","year":"2021","unstructured":"Bello, Irwan. 2021. Lambdanetworks: Modeling long-range interactions without attention. arXiv preprint arXiv:2102.08602."},{"key":"e_1_3_2_1_12_1","unstructured":"Rewon Child Scott Gray Alec Radford and Ilya Sutskever. 2019. Generating long sequences with sparse transformers. arXiv preprint arXiv:1904.10509."},{"key":"e_1_3_2_1_13_1","doi-asserted-by":"publisher","DOI":"10.1136\/amiajnl-2011-000203"},{"issue":"1","key":"e_1_3_2_1_14_1","first-page":"1","article-title":"Domain-specific language model pretraining for biomedical natural language processing","volume":"3","author":"YU GU, ROBERT TINN, HAO CHENG, MICHAEL LUCAS, NAOTO USUYAMA, XIAODONG LIU, TRISTAN NAUMANN, JIANFENG","year":"2021","unstructured":"YU GU, ROBERT TINN, HAO CHENG, MICHAEL LUCAS, NAOTO USUYAMA, XIAODONG LIU, TRISTAN NAUMANN, JIANFENG GAO, and HOIFUNG POON. 2021. Domain-specific language model pretraining for biomedical natural language processing. ACM Transactions on Computing for Healthcare (HEALTH), 3(1), 1-23.","journal-title":"ACM Transactions on Computing for Healthcare (HEALTH)"},{"key":"e_1_3_2_1_15_1","doi-asserted-by":"publisher","DOI":"10.5555\/1567594.1567610"},{"key":"e_1_3_2_1_16_1","doi-asserted-by":"crossref","unstructured":"Smith Larry 2008. Overview of BioCreative II gene mention recognition. Genome biology 9 1-19.","DOI":"10.1186\/gb-2008-9-s2-s2"},{"key":"e_1_3_2_1_17_1","volume-title":"Database","year":"2016","unstructured":"Li, Jiao, 2016. BioCreative V CDR task corpus: a resource for chemical disease relation extraction. Database, 2016."},{"key":"e_1_3_2_1_18_1","doi-asserted-by":"publisher","DOI":"10.1093\/bioinformatics\/btt580"},{"key":"e_1_3_2_1_19_1","volume-title":"Overview of the cancer genetics and pathway curation tasks of bionlp shared task","author":"Pyysalo Sampo","year":"2013","unstructured":"Sampo Pyysalo, Tomoko Ohta, Rafal Rak, Andrew Rowley, Hong-Woo Chun, Sung-Jae Jung, Sung-Pil Choi, Jun'ichi Tsujii, and Sophia Ananiadou. 2015. Overview of the cancer genetics and pathway curation tasks of bionlp shared task 2013. BMC bioinformatics, 16, 1-19."},{"key":"e_1_3_2_1_20_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.jbi.2013.12.006"},{"key":"e_1_3_2_1_21_1","doi-asserted-by":"crossref","unstructured":"Alsentzer Emily 2019. Publicly available clinical BERT embeddings. 
arXiv preprint arXiv:1904.03323.","DOI":"10.18653\/v1\/W19-1909"},{"key":"e_1_3_2_1_22_1","unstructured":"Loshchilov Ilya and Frank Hutter. 2017. Decoupled weight decay regularization. arXiv preprint arXiv:1711.05101."},{"key":"e_1_3_2_1_23_1","doi-asserted-by":"crossref","unstructured":"Zheng Yuan Yijia Liu Chuanqi Tan Songfang Huang and Fei Huang. 2021. Improving biomedical pretrained language models with knowledge. arXiv preprint arXiv:2104.10344.","DOI":"10.18653\/v1\/2021.bionlp-1.20"},{"key":"e_1_3_2_1_24_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.simpa.2021.100058"},{"key":"e_1_3_2_1_25_1","unstructured":"Sheng Zhang Hao Cheng Jianfeng Gao and Hoifung Poon. 2022. Optimizing bi-encoder for named entity recognition via contrastive learning. arXiv preprint arXiv:2208.14565."},{"key":"e_1_3_2_1_26_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.simpa.2022.100373"},{"key":"e_1_3_2_1_27_1","volume-title":"Journal of Physics: Conference Series (Vol. 1550, No. 3","author":"Wang Zhili","unstructured":"Zhili Wang, Yufan Wu, Pengbin Lei, and Cheng Peng. 2020. Named entity recognition method of brazilian legal text based on pre-training model. In Journal of Physics: Conference Series (Vol. 1550, No. 3, p. 032149). IOP Publishing."},{"key":"e_1_3_2_1_28_1","doi-asserted-by":"publisher","DOI":"10.1145\/3511600"},{"key":"e_1_3_2_1_29_1","first-page":"1","article-title":"Fusion deep learning and machine learning for heterogeneous military entity recognition","volume":"2022","author":"Li Hui","year":"2022","unstructured":"Hui Li, Lin Yu, Jie Zhang, and Ming Lyu. 2022. Fusion deep learning and machine learning for heterogeneous military entity recognition. Wireless Communications and Mobile Computing, 2022, 1-11.","journal-title":"Wireless Communications and Mobile Computing"}],"event":{"name":"ICMLC 2024: 2024 16th International Conference on Machine Learning and Computing","location":"Shenzhen China","acronym":"ICMLC 2024"},"container-title":["Proceedings of the 2024 16th International Conference on Machine Learning and Computing"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3651671.3651709","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3651671.3651709","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,8,22]],"date-time":"2025-08-22T11:21:23Z","timestamp":1755861683000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3651671.3651709"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,2,2]]},"references-count":29,"alternative-id":["10.1145\/3651671.3651709","10.1145\/3651671"],"URL":"https:\/\/doi.org\/10.1145\/3651671.3651709","relation":{},"subject":[],"published":{"date-parts":[[2024,2,2]]},"assertion":[{"value":"2024-06-07","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}
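The record above follows the standard Crossref REST API "work" envelope, so the same payload can be fetched live from the public works endpoint using the DOI it contains. A minimal Python sketch is below; it assumes the `requests` package is installed, and the `mailto` address is a hypothetical placeholder for Crossref's optional "polite pool" convention.

```python
import requests

# DOI taken from the record above.
DOI = "10.1145/3651671.3651709"

# Crossref's public REST API returns the same
# {"status": ..., "message-type": "work", "message": {...}} envelope
# shown above. The mailto parameter is optional etiquette; replace
# the placeholder address with your own contact.
resp = requests.get(
    f"https://api.crossref.org/works/{DOI}",
    params={"mailto": "you@example.org"},
    timeout=30,
)
resp.raise_for_status()
work = resp.json()["message"]

# Pull out a few of the fields present in the record above.
print(work["title"][0])
for a in work.get("author", []):
    print(" ", a.get("given", ""), a.get("family", ""))
print("DOI:", work["DOI"], "| pages:", work.get("page"))
print("In:", work["container-title"][0])
```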