{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,6,18]],"date-time":"2025-06-18T04:14:21Z","timestamp":1750220061390,"version":"3.41.0"},"publisher-location":"New York, NY, USA","reference-count":12,"publisher":"ACM","license":[{"start":{"date-parts":[[2022,9,23]],"date-time":"2022-09-23T00:00:00Z","timestamp":1663891200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2022,9,23]]},"DOI":"10.1145\/3568199.3568216","type":"proceedings-article","created":{"date-parts":[[2023,3,6]],"date-time":"2023-03-06T12:14:02Z","timestamp":1678104842000},"page":"105-110","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":0,"title":["Segment-KBERT: Exploration on Calculating the Similarity of Patent in the Field of Traditional Chinese Medicine"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-4579-4209","authenticated-orcid":false,"given":"Hai","family":"Kuang","sequence":"first","affiliation":[{"name":"School of Computer Science, South China Normal University, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-2485-4911","authenticated-orcid":false,"given":"Yinchen","family":"Du","sequence":"additional","affiliation":[{"name":"Meta Research and development Department, UCAP Cloud Information Technology Co., LTD, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-4311-0819","authenticated-orcid":false,"given":"Yang","family":"Bai","sequence":"additional","affiliation":[{"name":"Meta Research and development Department, UCAP Cloud Information Technology Co., LTD, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-4944-2371","authenticated-orcid":false,"given":"Yan","family":"Yan","sequence":"additional","affiliation":[{"name":"Meta Research and development Department, UCAP Cloud Information Technology Co., LTD, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0012-7303","authenticated-orcid":false,"given":"Zujian","family":"Peng","sequence":"additional","affiliation":[{"name":"Meta Research and development Department, UCAP Cloud Information Technology Co., LTD, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-9594-3209","authenticated-orcid":false,"given":"Xiongwen","family":"Pang","sequence":"additional","affiliation":[{"name":"School of Computer Science, South China Normal University, China"}]}],"member":"320","published-online":{"date-parts":[[2023,3,6]]},"reference":[{"key":"e_1_3_2_1_1_1","volume-title":"Bert: Pre-training of deep bidirectional transformers for language understanding.\u00a0arXiv preprint arXiv:1810.04805","author":"Devlin J.","year":"2018","unstructured":"Devlin , J. , Chang , M. W. , Lee , K. , & Toutanova , K. ( 2018 ). Bert: Pre-training of deep bidirectional transformers for language understanding.\u00a0arXiv preprint arXiv:1810.04805 . Devlin, J., Chang, M. W., Lee, K., & Toutanova, K. (2018). Bert: Pre-training of deep bidirectional transformers for language understanding.\u00a0arXiv preprint arXiv:1810.04805."},{"key":"e_1_3_2_1_2_1","volume-title":"Efficient estimation of word representations in vector space.\u00a0arXiv preprint arXiv:1301.3781","author":"Mikolov T.","year":"2013","unstructured":"Mikolov , T. , Chen , K. , Corrado , G. , & Dean , J. ( 2013 ). 
Efficient estimation of word representations in vector space.\u00a0arXiv preprint arXiv:1301.3781 . Mikolov, T., Chen, K., Corrado, G., & Dean, J. (2013). Efficient estimation of word representations in vector space.\u00a0arXiv preprint arXiv:1301.3781."},{"key":"e_1_3_2_1_3_1","volume-title":"Attention is all you need.\u00a0Advances in neural information processing systems,\u00a030","author":"Vaswani A.","year":"2017","unstructured":"Vaswani , A. , Shazeer , N. , Parmar , N. , Uszkoreit , J. , Jones , L. , Gomez , A. N., . .. & Polosukhin , I. ( 2017 ). Attention is all you need.\u00a0Advances in neural information processing systems,\u00a030 . Vaswani, A., Shazeer, N., Parmar, N., Uszkoreit, J., Jones, L., Gomez, A. N., ... & Polosukhin, I. (2017). Attention is all you need.\u00a0Advances in neural information processing systems,\u00a030."},{"key":"e_1_3_2_1_4_1","first-page":"2901","volume-title":"In\u00a0Proceedings of the AAAI Conference on Artificial Intelligence\u00a0(Vol. 34","author":"Liu W.","year":"2020","unstructured":"Liu , W. , Zhou , P. , Zhao , Z. , Wang , Z. , Ju , Q. , Deng , H. , & Wang , P. ( 2020 , April). K-bert: Enabling language representation with knowledge graph . In\u00a0Proceedings of the AAAI Conference on Artificial Intelligence\u00a0(Vol. 34 , No. 03, pp. 2901 - 2908 ). Liu, W., Zhou, P., Zhao, Z., Wang, Z., Ju, Q., Deng, H., & Wang, P. (2020, April). K-bert: Enabling language representation with knowledge graph. In\u00a0Proceedings of the AAAI Conference on Artificial Intelligence\u00a0(Vol. 34, No. 03, pp. 2901-2908)."},{"key":"e_1_3_2_1_5_1","volume-title":"Poly-encoders: Transformer architectures and pre-training strategies for fast and accurate multi-sentence scoring.\u00a0arXiv preprint arXiv:1905.01969","author":"Humeau S.","year":"2019","unstructured":"Humeau , S. , Shuster , K. , Lachaux , M. A. , & Weston , J. ( 2019 ). Poly-encoders: Transformer architectures and pre-training strategies for fast and accurate multi-sentence scoring.\u00a0arXiv preprint arXiv:1905.01969 . Humeau, S., Shuster, K., Lachaux, M. A., & Weston, J. (2019). Poly-encoders: Transformer architectures and pre-training strategies for fast and accurate multi-sentence scoring.\u00a0arXiv preprint arXiv:1905.01969."},{"key":"e_1_3_2_1_6_1","volume-title":"Improving Patent Mining and Relevance Classification using Transformers.\u00a0arXiv preprint arXiv:2105.03979","author":"Ding T.","year":"2021","unstructured":"Ding , T. , Vermeiren , W. , Ranwez , S. , & Xu , B. ( 2021 ). Improving Patent Mining and Relevance Classification using Transformers.\u00a0arXiv preprint arXiv:2105.03979 . Ding, T., Vermeiren, W., Ranwez, S., & Xu, B. (2021). Improving Patent Mining and Relevance Classification using Transformers.\u00a0arXiv preprint arXiv:2105.03979."},{"key":"e_1_3_2_1_7_1","volume-title":"Big bird: Transformers for longer sequences.\u00a0Advances in Neural Information Processing Systems,\u00a033, 17283-17297","author":"Zaheer M.","year":"2020","unstructured":"Zaheer , M. , Guruganesh , G. , Dubey , K. A. , Ainslie , J. , Alberti , C. , Ontanon , S. , ... & Ahmed , A. ( 2020 ). Big bird: Transformers for longer sequences.\u00a0Advances in Neural Information Processing Systems,\u00a033, 17283-17297 . Zaheer, M., Guruganesh, G., Dubey, K. A., Ainslie, J., Alberti, C., Ontanon, S., ... & Ahmed, A. (2020). 
Big bird: Transformers for longer sequences.\u00a0Advances in Neural Information Processing Systems,\u00a033, 17283-17297."},{"key":"e_1_3_2_1_8_1","volume-title":"In\u00a0CCF International Conference on Natural Language Processing and Chinese Computing\u00a0(pp. 486-498)","author":"Li W.","year":"2019","unstructured":"Li , W. , & Yang , Z. ( 2019 , October). Exploration on Generating Traditional Chinese Medicine Prescriptions from Symptoms with an End-to-End Approach . In\u00a0CCF International Conference on Natural Language Processing and Chinese Computing\u00a0(pp. 486-498) . Springer, Cham. Li, W., & Yang, Z. (2019, October). Exploration on Generating Traditional Chinese Medicine Prescriptions from Symptoms with an End-to-End Approach. In\u00a0CCF International Conference on Natural Language Processing and Chinese Computing\u00a0(pp. 486-498). Springer, Cham."},{"key":"e_1_3_2_1_9_1","volume-title":"In\u00a02019 IEEE Automatic Speech Recognition and Understanding Workshop (ASRU)\u00a0(pp. 838-844)","author":"Pappagari R.","year":"2019","unstructured":"Pappagari , R. , Zelasko , P. , Villalba , J. , Carmiel , Y. , & Dehak , N. ( 2019 , December). Hierarchical transformers for long document classification . In\u00a02019 IEEE Automatic Speech Recognition and Understanding Workshop (ASRU)\u00a0(pp. 838-844) . IEEE. Pappagari, R., Zelasko, P., Villalba, J., Carmiel, Y., & Dehak, N. (2019, December). Hierarchical transformers for long document classification. In\u00a02019 IEEE Automatic Speech Recognition and Understanding Workshop (ASRU)\u00a0(pp. 838-844). IEEE."},{"volume-title":"How to fine-tune bert for text classification?. In\u00a0China national conference on Chinese computational linguistics\u00a0(pp. 194-206)","author":"Sun C.","key":"e_1_3_2_1_10_1","unstructured":"Sun , C. , Qiu , X. , Xu , Y. , & Huang , X. (2019, October ). How to fine-tune bert for text classification?. In\u00a0China national conference on Chinese computational linguistics\u00a0(pp. 194-206) . Springer , Cham . Sun, C., Qiu, X., Xu, Y., & Huang, X. (2019, October). How to fine-tune bert for text classification?. In\u00a0China national conference on Chinese computational linguistics\u00a0(pp. 194-206). Springer, Cham."},{"key":"e_1_3_2_1_11_1","volume-title":"Bertscore: Evaluating text generation with bert.\u00a0arXiv preprint arXiv:1904.09675","author":"Zhang T.","year":"2019","unstructured":"Zhang , T. , Kishore , V. , Wu , F. , Weinberger , K. Q. , & Artzi , Y. ( 2019 ). Bertscore: Evaluating text generation with bert.\u00a0arXiv preprint arXiv:1904.09675 . Zhang, T., Kishore, V., Wu, F., Weinberger, K. Q., & Artzi, Y. (2019). Bertscore: Evaluating text generation with bert.\u00a0arXiv preprint arXiv:1904.09675."},{"key":"e_1_3_2_1_12_1","volume-title":"Cogltx: Applying bert to long texts.\u00a0Advances in Neural Information Processing Systems,\u00a033, 12792-12804","author":"Ding M.","year":"2020","unstructured":"Ding , M. , Zhou , C. , Yang , H. , & Tang , J. ( 2020 ). Cogltx: Applying bert to long texts.\u00a0Advances in Neural Information Processing Systems,\u00a033, 12792-12804 . Ding, M., Zhou, C., Yang, H., & Tang, J. (2020). 
Cogltx: Applying bert to long texts.\u00a0Advances in Neural Information Processing Systems,\u00a033, 12792-12804."}],"event":{"name":"MLMI 2022: 2022 5th International Conference on Machine Learning and Machine Intelligence","acronym":"MLMI 2022","location":"Hangzhou China"},"container-title":["2022 5th International Conference on Machine Learning and Machine Intelligence"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3568199.3568216","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3568199.3568216","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,17]],"date-time":"2025-06-17T18:08:43Z","timestamp":1750183723000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3568199.3568216"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2022,9,23]]},"references-count":12,"alternative-id":["10.1145\/3568199.3568216","10.1145\/3568199"],"URL":"https:\/\/doi.org\/10.1145\/3568199.3568216","relation":{},"subject":[],"published":{"date-parts":[[2022,9,23]]},"assertion":[{"value":"2023-03-06","order":2,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}