{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,8,21]],"date-time":"2025-08-21T12:40:13Z","timestamp":1755780013888,"version":"3.44.0"},"publisher-location":"New York, NY, USA","reference-count":14,"publisher":"ACM","license":[{"start":{"date-parts":[[2023,12,22]],"date-time":"2023-12-22T00:00:00Z","timestamp":1703203200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2023,12,22]]},"DOI":"10.1145\/3660043.3660104","type":"proceedings-article","created":{"date-parts":[[2024,5,30]],"date-time":"2024-05-30T10:18:07Z","timestamp":1717064287000},"page":"340-344","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":0,"title":["A Relationship Extraction Model Based on Reinforcement Learning and Multi-Teacher Knowledge Distillation"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0009-0007-6544-3389","authenticated-orcid":false,"given":"Zhenhao","family":"Cao","sequence":"first","affiliation":[{"name":"Changchun University of Science and Technology, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-7822-0041","authenticated-orcid":false,"given":"Ningjia","family":"Qiu","sequence":"additional","affiliation":[{"name":"Changchun University of Science and Technology, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-5165-6681","authenticated-orcid":false,"given":"Peng","family":"Wang","sequence":"additional","affiliation":[{"name":"Changchun University of Science and Technology, China"}]}],"member":"320","published-online":{"date-parts":[[2024,5,30]]},"reference":[{"key":"e_1_3_2_1_1_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/N19-1423"},{"key":"e_1_3_2_1_2_1","doi-asserted-by":"crossref","unstructured":"Lu W Jiao J Zhang R. Twinbert: Distilling knowledge to twin-structured compressed bert models for large-scale retrieval[C]\/\/Proceedings of the 29th ACM International Conference on Information & Knowledge Management. 2020 2645-2652.","DOI":"10.1145\/3340531.3412747"},{"key":"e_1_3_2_1_3_1","volume-title":"NewsBERT: Distilling pre-trained language model for intelligent news application [J]. arXiv preprint arXiv:2102.04887","author":"Wu C","year":"2021","unstructured":"Wu C, Wu F, Yu Y, NewsBERT: Distilling pre-trained language model for intelligent news application [J]. arXiv preprint arXiv:2102.04887, 2021."},{"key":"e_1_3_2_1_4_1","volume-title":"Matching the blanks: Distributional similarity for relation learning [J]. arXiv preprint arXiv:1906.03158","author":"Soares L B","year":"2019","unstructured":"Soares L B, FitzGerald N, Ling J, Matching the blanks: Distributional similarity for relation learning [J]. arXiv preprint arXiv:1906.03158, 2019."},{"key":"e_1_3_2_1_5_1","volume-title":"CCKS2019-shared task","author":"Shen T","year":"2019","unstructured":"Shen T, Wang D, Feng S, Bert-based denoising and reconstructing data of distant supervision for relation extraction[J]. CCKS2019-shared task, 2019."},{"issue":"7","key":"e_1_3_2_1_6_1","first-page":"38","volume":"14","author":"Hinton G","year":"2015","unstructured":"Hinton G, Vinyals O, Dean J. Distilling the Knowledge in a Neural Network[J]. 
Computer Science, 2015, 14(7):38-39.","journal-title":"Computer Science"},{"key":"e_1_3_2_1_7_1","volume-title":"Model compression with two-stage multi-teacher knowledge distillation for web question answering system[C]\/\/Proceedings of the 13th International Conference on Web Search and Data Mining","author":"Yang Z","year":"2020","unstructured":"Yang Z, Shou L, Gong M, Model compression with two-stage multi-teacher knowledge distillation for web question answering system[C]\/\/Proceedings of the 13th International Conference on Web Search and Data Mining. 2020, 690-698."},{"key":"e_1_3_2_1_8_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i16.17680"},{"key":"e_1_3_2_1_9_1","volume-title":"RoBERTa: A Robustly Optimized BERT Pretraining Approach","author":"Liu M.","year":"2019","unstructured":"Y. Liu, M. Ott, and N. Goyal, RoBERTa: A Robustly Optimized BERT Pretraining Approach, 2019, 1218\u20131227."},{"volume-title":"ICLR","author":"Clark Q.","key":"e_1_3_2_1_10_1","unstructured":"K. Clark, and Q. Le, ELECTRA: Pre-training Text Encoders as Discriminators Rather Than Generators, ICLR, 2020: 77\u201387."},{"key":"e_1_3_2_1_11_1","volume-title":"Enriching Pre-trained Language Model with Entity Information for Relation Classification [C]\/\/Proceedings of the 28th ACM international conference on information and knowledge management","author":"Wu Y.","year":"2019","unstructured":"S. Wu and Y. He, Enriching Pre-trained Language Model with Entity Information for Relation Classification [C]\/\/Proceedings of the 28th ACM international conference on information and knowledge management. 2019, 2361-2364."},{"key":"e_1_3_2_1_12_1","volume-title":"SemEval-2010 Task 8: Multi-Way Classification of Semantic Relations Between Pairs of Nominals, arXiv","author":"Hendrickx","year":"2019","unstructured":"I. Hendrickx, SemEval-2010 Task 8: Multi-Way Classification of Semantic Relations Between Pairs of Nominals, arXiv, 2019."},{"key":"e_1_3_2_1_13_1","first-page":"4803","author":"Han A","year":"2018","unstructured":"X. Han, FewRel: A Large-Scale Supervised Few-Shot Relation Classification Dataset with State-of-the-Art Evaluation, Empirical Methods in Natural Language Processing, 2018, pp. 4803-4809.","journal-title":"Empirical Methods in Natural Language Processing"},{"key":"e_1_3_2_1_14_1","series-title":"Lecture Notes in Computer Science","volume-title":"DuIE: A Large-Scale Chinese Dataset for Information Extraction","author":"Li H.","year":"2019","unstructured":"S. Li, and H. Wei, DuIE: A Large-Scale Chinese Dataset for Information Extraction, Lecture Notes in Computer Science, Jan. 
2019, 791\u2013800."}],"event":{"name":"ICIEAI 2023: 2023 International Conference on Information Education and Artificial Intelligence","acronym":"ICIEAI 2023","location":"Xiamen China"},"container-title":["Proceedings of the 2023 International Conference on Information Education and Artificial Intelligence"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3660043.3660104","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3660043.3660104","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,8,21]],"date-time":"2025-08-21T12:21:01Z","timestamp":1755778861000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3660043.3660104"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2023,12,22]]},"references-count":14,"alternative-id":["10.1145\/3660043.3660104","10.1145\/3660043"],"URL":"https:\/\/doi.org\/10.1145\/3660043.3660104","relation":{},"subject":[],"published":{"date-parts":[[2023,12,22]]},"assertion":[{"value":"2024-05-30","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}
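The record above is a Crossref REST API "work" message (status/message-type envelope with the bibliographic fields under "message"). As a minimal sketch of how such a record can be fetched and summarized, assuming only the standard Crossref works endpoint https://api.crossref.org/works/{DOI} and the fields visible in this record; the helper names fetch_work and summarize are illustrative, not part of the record:

import json
import urllib.request

DOI = "10.1145/3660043.3660104"  # DOI taken from the record above

def fetch_work(doi: str) -> dict:
    # Retrieve the same {"status": ..., "message": {...}} envelope from the Crossref API.
    url = f"https://api.crossref.org/works/{doi}"
    with urllib.request.urlopen(url) as resp:
        return json.load(resp)

def summarize(record: dict) -> str:
    # Pull a citation-style summary out of the "message" object.
    work = record["message"]
    title = work["title"][0]
    authors = ", ".join(
        f'{a.get("given", "")} {a.get("family", "")}'.strip()
        for a in work.get("author", [])
    )
    year = work["issued"]["date-parts"][0][0]
    return f"{authors} ({year}). {title}. https://doi.org/{work['DOI']}"

if __name__ == "__main__":
    print(summarize(fetch_work(DOI)))

Applied to this record, summarize would yield "Zhenhao Cao, Ningjia Qiu, Peng Wang (2023). A Relationship Extraction Model Based on Reinforcement Learning and Multi-Teacher Knowledge Distillation. https://doi.org/10.1145/3660043.3660104".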