{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,10,5]],"date-time":"2025-10-05T19:59:29Z","timestamp":1759694369341,"version":"3.41.0"},"publisher-location":"New York, NY, USA","reference-count":20,"publisher":"ACM","license":[{"start":{"date-parts":[[2021,10,26]],"date-time":"2021-10-26T00:00:00Z","timestamp":1635206400000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["61972275"],"award-info":[{"award-number":["61972275"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100002338","name":"Ministry of Education of the People's Republic of China","doi-asserted-by":"publisher","award":["2019ITA03006"],"award-info":[{"award-number":["2019ITA03006"]}],"id":[{"id":"10.13039\/501100002338","id-type":"DOI","asserted-by":"publisher"}]},{"name":"National Key R&D Program of China","award":["2020AAA0108504"],"award-info":[{"award-number":["2020AAA0108504"]}]}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2021,10,26]]},"DOI":"10.1145\/3459637.3482068","type":"proceedings-article","created":{"date-parts":[[2021,11,15]],"date-time":"2021-11-15T15:31:19Z","timestamp":1636990279000},"page":"3112-3116","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":3,"title":["CANCN-BERT"],"prefix":"10.1145","author":[{"given":"Zijing","family":"Ji","sequence":"first","affiliation":[{"name":"Tianjin University &amp; Tianjin Key Laboratory of Cognitive Computing and Application, Tianjin, China"}]},{"given":"Xin","family":"Wang","sequence":"additional","affiliation":[{"name":"Tianjin University &amp; Tianjin Key Laboratory of Cognitive Computing and Application, Tianjin, China"}]},{"given":"Yuxin","family":"Shen","sequence":"additional","affiliation":[{"name":"Tianjin University &amp; Tianjin Key Laboratory of Cognitive Computing and Application, Tianjin, China"}]},{"given":"Guozheng","family":"Rao","sequence":"additional","affiliation":[{"name":"Tianjin University &amp; Tianjin Key Laboratory of Cognitive Computing and Application, Tianjin, China"}]}],"member":"320","published-online":{"date-parts":[[2021,10,30]]},"reference":[{"key":"e_1_3_2_1_1_1","unstructured":"Chinese-poetry. 2019. The most comprehensive database of Chinese poetry. https:\/\/github.com\/chinese-poetry\/chinese-poetry.  Chinese-poetry. 2019. The most comprehensive database of Chinese poetry. https:\/\/github.com\/chinese-poetry\/chinese-poetry."},{"volume-title":"Proceedings of the 8th Proceedings of International Conference on Learning Representations.","author":"Clark Kevin","key":"e_1_3_2_1_2_1","unstructured":"Kevin Clark , Minh-Thang Luong , Quoc V. Le , and Christopher D. Manning . 2020. ELECTRA: Pre-training Text Encoders as Discriminators Rather Than Generators . In Proceedings of the 8th Proceedings of International Conference on Learning Representations. Kevin Clark, Minh-Thang Luong, Quoc V. Le, and Christopher D. Manning. 2020. ELECTRA: Pre-training Text Encoders as Discriminators Rather Than Generators. In Proceedings of the 8th Proceedings of International Conference on Learning Representations."},{"key":"e_1_3_2_1_3_1","unstructured":"Heywhale Community. 2021. 
1946--2003 People's Daily News Corpus. https:\/\/www.heywhale.com\/mw\/dataset\/605da1e8ce98c30015d46894.  Heywhale Community. 2021. 1946--2003 People's Daily News Corpus. https:\/\/www.heywhale.com\/mw\/dataset\/605da1e8ce98c30015d46894."},{"key":"e_1_3_2_1_4_1","unstructured":"Zhonghua Book Company. 2020. The registration channel for the \"Gulian Cup\" Ancient Book Named Entity Recognition Evaluation Competition is now open! http:\/\/www.zhbc.com.cn\/zhsj\/fg\/news\/info.html?newsid=402885966e259-cb10172605463cf25cf.  Zhonghua Book Company. 2020. The registration channel for the \"Gulian Cup\" Ancient Book Named Entity Recognition Evaluation Competition is now open! http:\/\/www.zhbc.com.cn\/zhsj\/fg\/news\/info.html?newsid=402885966e259-cb10172605463cf25cf."},{"key":"e_1_3_2_1_5_1","volume-title":"Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing. 657--668","author":"Cui Yiming","year":"2020","unstructured":"Yiming Cui , Wanxiang Che , Ting Liu , Bing Qin , Shijin Wang , and Guoping Hu . 2020 . Revisiting Pre-Trained Models for Chinese Natural Language Processing . In Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing. 657--668 . Yiming Cui, Wanxiang Che, Ting Liu, Bing Qin, Shijin Wang, and Guoping Hu. 2020. Revisiting Pre-Trained Models for Chinese Natural Language Processing. In Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing. 657--668."},{"key":"e_1_3_2_1_6_1","volume-title":"Pre-Training with Whole Word Masking for Chinese BERT. arxiv","author":"Cui Yiming","year":"1906","unstructured":"Yiming Cui , Wanxiang Che , Ting Liu , Bing Qin , Ziqing Yang , Shijin Wang , and Guoping Hu. 2019. Pre-Training with Whole Word Masking for Chinese BERT. arxiv : 1906 .08101 Yiming Cui, Wanxiang Che, Ting Liu, Bing Qin, Ziqing Yang, Shijin Wang, and Guoping Hu. 2019. Pre-Training with Whole Word Masking for Chinese BERT. arxiv: 1906.08101"},{"key":"e_1_3_2_1_7_1","volume-title":"BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding. arxiv","author":"Devlin Jacob","year":"2018","unstructured":"Jacob Devlin , Ming-Wei Chang , Kenton Lee , and Kristina Toutanova . 2018 . BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding. arxiv : 1810.04805 Jacob Devlin, Ming-Wei Chang, Kenton Lee, and Kristina Toutanova. 2018. BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding. arxiv: 1810.04805"},{"key":"e_1_3_2_1_8_1","volume-title":"Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing. 4729--4740","author":"Diao Shizhe","year":"2020","unstructured":"Shizhe Diao , Jiaxin Bai , Yan Song , Tong Zhang , and Yonggang Wang . 2020 . ZEN: Pre-training Chinese Text Encoder Enhanced by N-gram Representations . In Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing. 4729--4740 . Shizhe Diao, Jiaxin Bai, Yan Song, Tong Zhang, and Yonggang Wang. 2020. ZEN: Pre-training Chinese Text Encoder Enhanced by N-gram Representations. In Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing. 4729--4740."},{"key":"e_1_3_2_1_9_1","first-page":"8","article-title":"Knowledge Representation and Sentence Segmentation of Ancient Chinese Based on Deep Language Model","volume":"35","author":"Hu Renfen","year":"2021","unstructured":"Renfen Hu , Shen Li , and Yuchen Zhu . 2021 . 
Knowledge Representation and Sentence Segmentation of Ancient Chinese Based on Deep Language Model . Journal of Chinese Information Processing , Vol. 35 , 4 (2021), 8 -- 15 . Renfen Hu, Shen Li, and Yuchen Zhu. 2021. Knowledge Representation and Sentence Segmentation of Ancient Chinese Based on Deep Language Model. Journal of Chinese Information Processing, Vol. 35, 4 (2021), 8--15.","journal-title":"Journal of Chinese Information Processing"},{"key":"e_1_3_2_1_10_1","volume-title":"SpanBERT: Improving Pre-training by Representing and Predicting Spans. arxiv","author":"Joshi Mandar","year":"1907","unstructured":"Mandar Joshi , Danqi Chen , Yinhan Liu , Daniel S. Weld , Luke Zettlemoyer , and Omer Levy . 2019. SpanBERT: Improving Pre-training by Representing and Predicting Spans. arxiv : 1907 .10529 Mandar Joshi, Danqi Chen, Yinhan Liu, Daniel S. Weld, Luke Zettlemoyer, and Omer Levy. 2019. SpanBERT: Improving Pre-training by Representing and Predicting Spans. arxiv: 1907.10529"},{"key":"e_1_3_2_1_11_1","volume-title":"ALBERT: A Lite BERT for Self-supervised Learning of Language Representations. arxiv","author":"Lan Zhenzhong","year":"2019","unstructured":"Zhenzhong Lan , Mingda Chen , Sebastian Goodman , Kevin Gimpel , Piyush Sharma , and Radu Soricut . 2019 . ALBERT: A Lite BERT for Self-supervised Learning of Language Representations. arxiv : 1909.11942 Zhenzhong Lan, Mingda Chen, Sebastian Goodman, Kevin Gimpel, Piyush Sharma, and Radu Soricut. 2019. ALBERT: A Lite BERT for Self-supervised Learning of Language Representations. arxiv: 1909.11942"},{"key":"e_1_3_2_1_12_1","volume-title":"RoBERTa: A Robustly Optimized BERT Pretraining Approach. arxiv","author":"Liu Yinhan","year":"1907","unstructured":"Yinhan Liu , Myle Ott , Naman Goyal , Jingfei Du , Mandar Joshi , Danqi Chen , Omer Levy , Mike Lewis , Luke Zettlemoyer , and Veselin Stoyanov . 2019. RoBERTa: A Robustly Optimized BERT Pretraining Approach. arxiv : 1907 .11692 Yinhan Liu, Myle Ott, Naman Goyal, Jingfei Du, Mandar Joshi, Danqi Chen, Omer Levy, Mike Lewis, Luke Zettlemoyer, and Veselin Stoyanov. 2019. RoBERTa: A Robustly Optimized BERT Pretraining Approach. arxiv: 1907.11692"},{"key":"e_1_3_2_1_13_1","volume-title":"Op-ed: China to make more interaction with world at higher-level opening up","author":"Sheng Zhong","year":"2019","unstructured":"Zhong Sheng . 2019 . Op-ed: China to make more interaction with world at higher-level opening up . http:\/\/en.people.cn\/n3\/2019\/0428\/c90000--9573410.html. Zhong Sheng. 2019. Op-ed: China to make more interaction with world at higher-level opening up. http:\/\/en.people.cn\/n3\/2019\/0428\/c90000--9573410.html."},{"key":"e_1_3_2_1_14_1","volume-title":"ERNIE: Enhanced Representation through Knowledge Integration. arxiv","author":"Sun Yu","year":"2019","unstructured":"Yu Sun , Shuohuan Wang , Yukun Li , Shikun Feng , Xuyi Chen , Han Zhang , Xin Tian , Danxiang Zhu , Hao Tian , and Hua Wu . 2019 . ERNIE: Enhanced Representation through Knowledge Integration. arxiv : 1904.09223 Yu Sun, Shuohuan Wang, Yukun Li, Shikun Feng, Xuyi Chen, Han Zhang, Xin Tian, Danxiang Zhu, Hao Tian, and Hua Wu. 2019. ERNIE: Enhanced Representation through Knowledge Integration. arxiv: 1904.09223"},{"key":"e_1_3_2_1_15_1","volume-title":"NEZHA: Neural Contextualized Representation for Chinese Language Understanding. arxiv","author":"Wei Junqiu","year":"2019","unstructured":"Junqiu Wei , Xiaozhe Ren , Xiaoguang Li , Wenyong Huang , Yi Liao , Yasheng Wang , Jiashu Lin , Xin Jiang , Xiao Chen , and Qun Liu . 
2019 . NEZHA: Neural Contextualized Representation for Chinese Language Understanding. arxiv : 1909.00204 Junqiu Wei, Xiaozhe Ren, Xiaoguang Li, Wenyong Huang, Yi Liao, Yasheng Wang, Jiashu Lin, Xin Jiang, Xiao Chen, and Qun Liu. 2019. NEZHA: Neural Contextualized Representation for Chinese Language Understanding. arxiv: 1909.00204"},{"key":"e_1_3_2_1_16_1","first-page":"80","article-title":"Some Examples of the Writing Skills for the Beginning and End of the Chinese Traditional Articles","volume":"10","author":"Xiao Ying","year":"2000","unstructured":"Ying Xiao . 2000 . Some Examples of the Writing Skills for the Beginning and End of the Chinese Traditional Articles . Journal of Xidian University (Social Science Edition) , Vol. 10 , 2 (2000), 80 -- 85 . Ying Xiao. 2000. Some Examples of the Writing Skills for the Beginning and End of the Chinese Traditional Articles. Journal of Xidian University (Social Science Edition), Vol. 10, 2 (2000), 80--85.","journal-title":"Journal of Xidian University (Social Science Edition)"},{"key":"e_1_3_2_1_17_1","unstructured":"Liang Xu Danny Lan Xuanwei Zhang Lu Li Qianqian Dong Chenjie Cao Cong Yu Weitang Liu and Hai Hu. 2019. cluebenchmarks.com. https:\/\/www.cluebenchmarks.com\/introduce.html.  Liang Xu Danny Lan Xuanwei Zhang Lu Li Qianqian Dong Chenjie Cao Cong Yu Weitang Liu and Hai Hu. 2019. cluebenchmarks.com. https:\/\/www.cluebenchmarks.com\/introduce.html."},{"key":"e_1_3_2_1_18_1","first-page":"83","article-title":"Sentiment Polarity Analysis of Reviews Based on Shallow Text Structure (in Chinese)","volume":"25","author":"Yang Jiang","year":"2011","unstructured":"Jiang Yang , Min Hou , and Ning Wang . 2011 . Sentiment Polarity Analysis of Reviews Based on Shallow Text Structure (in Chinese) . Journal of Chinese Information Processing , Vol. 25 , 2 (2011), 83 -- 89 . Jiang Yang, Min Hou, and Ning Wang. 2011. Sentiment Polarity Analysis of Reviews Based on Shallow Text Structure (in Chinese). Journal of Chinese Information Processing, Vol. 25, 2 (2011), 83--89.","journal-title":"Journal of Chinese Information Processing"},{"key":"e_1_3_2_1_19_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D19-1637"},{"key":"e_1_3_2_1_20_1","first-page":"57","article-title":"Automatic Ancient Chinese Texts Segmentation Based on BERT","volume":"33","author":"Yu Jingsong","year":"2019","unstructured":"Jingsong Yu , Yi Wei , and Yongwei Zhang . 2019 . Automatic Ancient Chinese Texts Segmentation Based on BERT . Journal of Chinese Information Processing , Vol. 33 , 11 (2019), 57 -- 63 . Jingsong Yu, Yi Wei, and Yongwei Zhang. 2019. Automatic Ancient Chinese Texts Segmentation Based on BERT. Journal of Chinese Information Processing, Vol. 
33, 11 (2019), 57--63.","journal-title":"Journal of Chinese Information Processing"}],"event":{"name":"CIKM '21: The 30th ACM International Conference on Information and Knowledge Management","sponsor":["SIGWEB ACM Special Interest Group on Hypertext, Hypermedia, and Web","SIGIR ACM Special Interest Group on Information Retrieval"],"location":"Virtual Event Queensland Australia","acronym":"CIKM '21"},"container-title":["Proceedings of the 30th ACM International Conference on Information &amp; Knowledge Management"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3459637.3482068","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3459637.3482068","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,17]],"date-time":"2025-06-17T19:30:12Z","timestamp":1750188612000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3459637.3482068"}},"subtitle":["A Joint Pre-Trained Language Model for Classical and Modern Chinese"],"short-title":[],"issued":{"date-parts":[[2021,10,26]]},"references-count":20,"alternative-id":["10.1145\/3459637.3482068","10.1145\/3459637"],"URL":"https:\/\/doi.org\/10.1145\/3459637.3482068","relation":{},"subject":[],"published":{"date-parts":[[2021,10,26]]},"assertion":[{"value":"2021-10-30","order":2,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}
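
The record above is a Crossref works response for DOI 10.1145/3459637.3482068. The following is a minimal sketch, not part of the record itself, of how such a record could be fetched from the public Crossref REST API in Python; the field accesses mirror the keys shown above ("title", "subtitle", "author", "references-count").

import requests

DOI = "10.1145/3459637.3482068"  # CANCN-BERT, CIKM '21

# Retrieve the works record; the payload's "message" object is the record shown above.
resp = requests.get(f"https://api.crossref.org/works/{DOI}", timeout=30)
resp.raise_for_status()
work = resp.json()["message"]

# Title and subtitle are stored as single-element lists in the record.
print(work["title"][0], "-", work["subtitle"][0])
# Each author entry carries "given" and "family" names.
print(", ".join(f'{a["given"]} {a["family"]}' for a in work["author"]))
print("references:", work["references-count"])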