{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,2,16]],"date-time":"2026-02-16T16:57:12Z","timestamp":1771261032649,"version":"3.50.1"},"reference-count":49,"publisher":"Springer Science and Business Media LLC","issue":"6","license":[{"start":{"date-parts":[[2023,12,7]],"date-time":"2023-12-07T00:00:00Z","timestamp":1701907200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2023,12,7]],"date-time":"2023-12-07T00:00:00Z","timestamp":1701907200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"funder":[{"DOI":"10.13039\/501100012456","name":"National Social Science Fund of China","doi-asserted-by":"publisher","award":["2017CG29"],"award-info":[{"award-number":["2017CG29"]}],"id":[{"id":"10.13039\/501100012456","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["Int. J. Mach. Learn. & Cyber."],"published-print":{"date-parts":[[2024,6]]},"DOI":"10.1007\/s13042-023-02023-0","type":"journal-article","created":{"date-parts":[[2023,12,7]],"date-time":"2023-12-07T18:01:37Z","timestamp":1701972097000},"page":"2199-2208","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":7,"title":["Local or global? A novel transformer for Chinese named entity recognition based on multi-view and sliding attention"],"prefix":"10.1007","volume":"15","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-0360-8738","authenticated-orcid":false,"given":"Yuke","family":"Wang","sequence":"first","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0001-7568-6017","authenticated-orcid":false,"given":"Ling","family":"Lu","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0599-1758","authenticated-orcid":false,"given":"Wu","family":"Yang","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0002-8780-3994","authenticated-orcid":false,"given":"Yinong","family":"Chen","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2023,12,7]]},"reference":[{"key":"2023_CR1","doi-asserted-by":"publisher","DOI":"10.3389\/fpsyg.2021.615538","volume":"12","author":"H Brouwer","year":"2021","unstructured":"Brouwer H, Delogu F, Venhuizen NJ, Crocker MW (2021) Neurobehavioral correlates of surprisal in language comprehension: a neurocomputational model. Front Psychol 12:615538","journal-title":"Front Psychol"},{"key":"2023_CR2","unstructured":"Chung J, Gulcehre C, Cho K, Bengio Y (2015) Gated feedback recurrent neural networks. In: International conference on machine learning, PMLR, pp 2067\u20132075"},{"key":"2023_CR3","doi-asserted-by":"publisher","first-page":"3504","DOI":"10.1109\/TASLP.2021.3124365","volume":"29","author":"Y Cui","year":"2021","unstructured":"Cui Y, Che W, Liu T, Qin B, Yang Z (2021) Pre-training with whole word masking for chinese bert. IEEE\/ACM Transactions on audio, speech, and language processing 29:3504\u20133514","journal-title":"IEEE\/ACM Transactions on Audio, Speech, and Language Processing"},{"key":"2023_CR4","unstructured":"Dong L, Yang N, Wang W, Wei F, Liu X, Wang Y, Hon HW (2019) Unified language model pre-training for natural language understanding and generation. In: Advances in Neural Information Processing Systems, 32"},{"key":"2023_CR5","doi-asserted-by":"crossref","unstructured":"Ding R, Xie P, Zhang X, Lu W, Li L, Si L (2019) A neural multi-digraph model for Chinese NER with gazetteers. In: Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics, pp 1462\u20131467","DOI":"10.18653\/v1\/P19-1141"},{"issue":"3","key":"2023_CR6","doi-asserted-by":"publisher","first-page":"268","DOI":"10.1109\/PROC.1973.9030","volume":"61","author":"D Forney","year":"1973","unstructured":"Forney D (1973) The Viterbi algorithm. Proc IEEE 61(3):268\u2013278","journal-title":"Proc IEEE"},{"key":"2023_CR7","doi-asserted-by":"crossref","unstructured":"Gui T, Ma R, Zhang Q, Zhao L, Jiang Y-G, Huang X (2019a) Cnn-based Chinese NER with lexicon rethinking. In: Proceedings of the 28th International Joint Conference on Artificial Intelligence, AAAI Press, pp 4982\u20134988","DOI":"10.24963\/ijcai.2019\/692"},{"key":"2023_CR8","doi-asserted-by":"crossref","unstructured":"Gui T, Zou Y, Zhang Q, Peng M, Fu J, Wei Z, Huang X-J (2019b) A lexicon-based graph neural network for Chinese NER. In: Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP -IJCNLP), pp 1039\u20131049","DOI":"10.18653\/v1\/D19-1096"},{"issue":"8","key":"2023_CR9","doi-asserted-by":"publisher","first-page":"1735","DOI":"10.1162\/neco.1997.9.8.1735","volume":"9","author":"S Hochreiter","year":"1997","unstructured":"Hochreiter S, Schmidhuber J (1997) Long short-term memory. Neural Comput 9(8):1735\u20131780","journal-title":"Neural Comput"},{"key":"2023_CR10","doi-asserted-by":"crossref","unstructured":"He H, Sun X (2017) A unified model for cross-domain and semi-supervised named entity recognition in chinese social media. In: Proceedings of the 31th AAAI Conference on Artificial Intelligence.","DOI":"10.1609\/aaai.v31i1.10977"},{"key":"2023_CR11","unstructured":"Huang M, Zhang J, Cai M, Zhang Y, Yao J, You Y, Ma Z (2020) Improving RNN transducer with normalized jointer network. arXiv preprint arXiv:2011.01576"},{"issue":"1","key":"2023_CR12","doi-asserted-by":"publisher","first-page":"16","DOI":"10.1016\/j.tics.2006.10.012","volume":"11","author":"C Koch","year":"2007","unstructured":"Koch C, Tsuchiya N (2007) Attention and consciousness: two distinct brain processes. Trends Cogn Sci 11(1):16\u201322","journal-title":"Trends Cogn Sci"},{"key":"2023_CR13","doi-asserted-by":"publisher","first-page":"23","DOI":"10.1016\/j.brainres.2006.12.063","volume":"1146","author":"GR Kuperberg","year":"2007","unstructured":"Kuperberg GR (2007) Neural mechanisms of language comprehension: challenges to syntax. Brain Res 1146:23\u201349","journal-title":"Brain Res"},{"issue":"1","key":"2023_CR14","doi-asserted-by":"publisher","first-page":"32","DOI":"10.1080\/23273798.2015.1102299","volume":"31","author":"GR Kuperberg","year":"2016","unstructured":"Kuperberg GR, Jaeger TF (2016) What do we mean by prediction in language comprehension? Lang, Cogn Neurosci 31(1):32\u201359","journal-title":"Lang, Cogn Neurosci"},{"key":"2023_CR15","unstructured":"Kingma DP, Ba J (2014) Adam: a method for stochastic optimization. arXiv e-prints"},{"key":"2023_CR16","unstructured":"Lafferty J, McCallum A, Pereira FC (2001) Conditional random fields: probabilistic models for segmenting and labeling sequence data"},{"key":"2023_CR17","unstructured":"Levow GA (2006) The third international Chinese language processing bakeoff: word segmentation and named entity recognition"},{"key":"2023_CR18","doi-asserted-by":"publisher","first-page":"634","DOI":"10.1007\/978-3-642-14932-0_78","volume-title":"Advanced intelligent computing theories and applications. With aspects of artificial intelligence","author":"Z Liu","year":"2010","unstructured":"Liu Z, Zhu C, Zhao T (2010) Chinese named entity recognition with a sequence labeling approach: based on characters, or based on words? Advanced intelligent computing theories and applications. With aspects of artificial intelligence. Springer, Berlin, Heidelberg, pp 634\u2013640"},{"key":"2023_CR19","doi-asserted-by":"crossref","unstructured":"Lample G, Ballesteros M, Subramanian S, Kawakami K, Dyer C (2016) Neural architectures for named entity recognition. In: Proc. of NAACL-HLT","DOI":"10.18653\/v1\/N16-1030"},{"key":"2023_CR20","doi-asserted-by":"crossref","unstructured":"Li X, Yan H, Qiu X, Huang X (2020) FLAT: Chinese NER using flat-lattice transformer. In: Proceedings of ACL, 2020","DOI":"10.18653\/v1\/2020.acl-main.611"},{"key":"2023_CR21","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2022.117727","volume":"196","author":"D Li","year":"2022","unstructured":"Li D, Yan L, Yang J, Ma Z (2022) Dependency syntax guided BERT-BiLSTM-GAM-CRF for Chinese NER. Expert Syst Appl 196:116682. https:\/\/doi.org\/10.1016\/j.eswa.2022.117727","journal-title":"Expert Syst Appl"},{"key":"2023_CR22","doi-asserted-by":"crossref","unstructured":"Mengge X, Bowen Y, Tingwen L, Yue Z, Erli M, Bin W (2019) Porous lattice-based transformer encoder for chinese NER. arXiv preprint arXiv:1911.02733","DOI":"10.18653\/v1\/2020.coling-main.340"},{"key":"2023_CR23","unstructured":"Mnih V, Heess N, Graves A, Kavukcuoglu K (2014) Recurrent models of visual attention. In: Advances in Neural Information Processing Systems, 3"},{"key":"2023_CR24","doi-asserted-by":"crossref","unstructured":"Ma R, Peng M, Zhang Q, Wei Z, Huang X-J (2020) Simplify the usage of lexicon in Chinese NER. In: Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics, pp 5951\u20135960","DOI":"10.18653\/v1\/2020.acl-main.528"},{"key":"2023_CR25","unstructured":"Mai S, Zeng Y, Zheng S, Hu H (2021) Hybrid contrastive learning of tri-modal representation for multimodal sentiment analysis"},{"issue":"2","key":"2023_CR26","doi-asserted-by":"publisher","first-page":"237","DOI":"10.3102\/00028312024002237","volume":"24","author":"W Nagy","year":"1987","unstructured":"Nagy W, Anderson R, Herman P (1987) Learning word meanings from context during normal reading. Am Educ Res J 24(2):237\u2013270","journal-title":"Am Educ Res J"},{"key":"2023_CR27","doi-asserted-by":"crossref","unstructured":"Peng N, Dredze M (2015) Named entity recognition for Chinese social media with jointly trained embeddings. In: Proceedings of the 2015 Conference on Empirical Methods in Natural Language Processing, pp 548\u2013554","DOI":"10.18653\/v1\/D15-1064"},{"key":"2023_CR28","unstructured":"Qi D, Su L, Song J, Cui E, Bharti T, Sacheti A (2020) Imagebert: cross-modal pre-training with large-scale weak-supervised image-text data. arXiv preprint arXiv:2001.07966"},{"issue":"3","key":"2023_CR29","doi-asserted-by":"publisher","first-page":"372","DOI":"10.1037\/0033-2909.124.3.372","volume":"124","author":"K Rayner","year":"1998","unstructured":"Rayner K (1998) Eye movements in reading and information processing: 20 years of research. Psychol Bull 124(3):372","journal-title":"Psychol Bull"},{"issue":"1","key":"2023_CR30","doi-asserted-by":"publisher","first-page":"27","DOI":"10.1037\/0096-1523.3.1.27","volume":"3","author":"R Schuberth","year":"1977","unstructured":"Schuberth R, Eimas P (1977) Effects of context on the classification of words and nonwords. J Exp Psychol Hum Percept Perform 3(1):27","journal-title":"J Exp Psychol Hum Percept Perform"},{"issue":"3","key":"2023_CR31","doi-asserted-by":"publisher","first-page":"658","DOI":"10.1037\/0096-1523.7.3.658","volume":"7","author":"K Stanovich","year":"1981","unstructured":"Stanovich K, West R (1981) The effect of sentence context on ongoing word recognition: tests of a two-process theory. J Exp Psychol Hum Percept Perform 7(3):658","journal-title":"J Exp Psychol Hum Percept Perform"},{"key":"2023_CR32","doi-asserted-by":"crossref","unstructured":"Sun C, Myers A, Vondrick C, Murphy K, Schmid C (2019) Videobert: a joint model for video and language representation learning. In: Proceedings of the IEEE\/CVF International Conference on Computer Vision, pp 7464\u20137473","DOI":"10.1109\/ICCV.2019.00756"},{"key":"2023_CR33","doi-asserted-by":"crossref","unstructured":"Sui D, Chen Y, Liu K, Zhao J, Liu S (2019) Leverage lexical knowledge for Chinese named entity recognition via collaborative graph network. In: Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP), pp 3821\u20133831","DOI":"10.18653\/v1\/D19-1396"},{"issue":"3","key":"2023_CR34","doi-asserted-by":"publisher","first-page":"216","DOI":"10.2307\/25470707","volume":"215","author":"G Tononi","year":"2008","unstructured":"Tononi G (2008) Consciousness as integrated information: a provisional manifesto. Biol Bull 215(3):216\u2013242","journal-title":"Biol Bull"},{"key":"2023_CR35","doi-asserted-by":"crossref","unstructured":"Tian Y, Song Y, Xia F, Zhang T, Wang Y (2020) Improving Chinese word segmentation with wordhood memory networks [C] Proceedings of ACL 2020, pp 8274\u20138285","DOI":"10.18653\/v1\/2020.acl-main.734"},{"key":"2023_CR36","doi-asserted-by":"crossref","unstructured":"Tian Z, Yi J, Bai Y, Tao J, Zhang S, Wen Z (2021) FSR: accelerating the inference process of transducer-based models by applying fast-skip regularization. arXiv preprint arXiv:2104.02882","DOI":"10.21437\/Interspeech.2021-1367"},{"key":"2023_CR37","unstructured":"Vaswani A, Shazeer N, Parmar N, Uszkoreit J, Jones L, Gomez AN, Kaiser L, Polosukhin I (2017) Attention is all you need. In: Advances in neural information processing systems, pp 5998\u20136008"},{"key":"2023_CR38","unstructured":"Velickovic P, Cucurull G, Casanova A, Romero A, Lio P, Bengio Y (2017) Graph attention networks. arXiv preprint arXiv:1710.10903"},{"key":"2023_CR39","unstructured":"Weischedel R, Pradhan S, Ramshaw L, Palmer M, Xue N, Marcus M, Houston A (2011) Ontonotes release 4.0. LDC2011T03, Philadelphia, Penn.: Linguistic Data Consortium"},{"key":"2023_CR40","doi-asserted-by":"crossref","unstructured":"Wu S, Song X, Feng Z (2021) Mect: multi-metadata embedding based cross-transformer for Chinese named entity recognition","DOI":"10.18653\/v1\/2021.acl-long.121"},{"key":"2023_CR41","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2022.117467","author":"Y Wang","year":"2022","unstructured":"Wang Y, Lu L, Wu Y, Chen Y (2022) Polymorphic graph attention network for Chinese NER. Expert Syst Appl. https:\/\/doi.org\/10.1016\/j.eswa.2022.117467","journal-title":"Expert Syst Appl"},{"issue":"1","key":"2023_CR42","first-page":"29","volume":"8","author":"N Xue","year":"2003","unstructured":"Xue N (2003) Chinese word segmentation as character tagging. Int J Comput Linguist Chin Lang Process 8(1):29\u201348 (Special Issue on Word Formation and Chinese Language Processing)","journal-title":"Int J Comput Linguist Chin Lang Process"},{"key":"2023_CR43","first-page":"222","volume":"207","author":"HM Yang","year":"1999","unstructured":"Yang HM, McConkie GW (1999) Reading Chinese: some basic eye-movement characteristics. Read Chin Scr: Cogn Anal 207:222","journal-title":"Read Chin Scr: Cogn Anal"},{"key":"2023_CR44","unstructured":"Yan H, Deng B, Li X, Qiu X (2019) Tener: adapting transformer encoder for named entity recognition"},{"key":"2023_CR45","doi-asserted-by":"crossref","unstructured":"Zhang Y, Yang J (2018) Chinese NER using lattice LSTM. In: Proceedings of the 56th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long papers), pp 1554\u20131564","DOI":"10.18653\/v1\/P18-1144"},{"key":"2023_CR46","doi-asserted-by":"publisher","first-page":"1898","DOI":"10.1109\/LSP.2021.3112314","volume":"28","author":"K Zhang","year":"2021","unstructured":"Zhang K, Li Y, Wang J, Wang Z, Li X (2021) Feature fusion for multimodal emotion recognition based on deep canonical correlation analysis. IEEE Signal Process Lett 28:1898\u20131902","journal-title":"IEEE Signal Process Lett"},{"key":"2023_CR47","doi-asserted-by":"crossref","unstructured":"Zhang B, Wu D, Peng Z, Song X, Yao Z, Lv H, Niu J (2022) WeNet 2.0: more productive end-to-end speech recognition toolkit. arXiv preprint arXiv:2203.15455","DOI":"10.21437\/Interspeech.2022-483"},{"key":"2023_CR48","doi-asserted-by":"crossref","unstructured":"Zhao S, Hu M, Cai Z, Chen H, Liu F (2021) Dynamic modeling cross- and self-lattice attention network for Chinese NER. In: Proceedings of the 35th AAAI Conference on Artificial Intelligence, pp 14515\u201314523","DOI":"10.1609\/aaai.v35i16.17706"},{"key":"2023_CR49","doi-asserted-by":"crossref","unstructured":"Zhao J, Li R, Jin Q, Wang X, Li H (2021) MEmoBERT: pre-training model with prompt-based learning for multimodal emotion recognition","DOI":"10.1109\/ICASSP43922.2022.9746910"}],"container-title":["International Journal of Machine Learning and Cybernetics"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s13042-023-02023-0.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s13042-023-02023-0\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s13042-023-02023-0.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,5,23]],"date-time":"2024-05-23T05:29:11Z","timestamp":1716442151000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s13042-023-02023-0"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2023,12,7]]},"references-count":49,"journal-issue":{"issue":"6","published-print":{"date-parts":[[2024,6]]}},"alternative-id":["2023"],"URL":"https:\/\/doi.org\/10.1007\/s13042-023-02023-0","relation":{},"ISSN":["1868-8071","1868-808X"],"issn-type":[{"value":"1868-8071","type":"print"},{"value":"1868-808X","type":"electronic"}],"subject":[],"published":{"date-parts":[[2023,12,7]]},"assertion":[{"value":"18 March 2023","order":1,"name":"received","label":"Received","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"25 October 2023","order":2,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"7 December 2023","order":3,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"The authors have no competing interests to declare that are relevant to the content of this article.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflict of interest"}}]}}