{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,1,12]],"date-time":"2026-01-12T21:50:17Z","timestamp":1768254617485,"version":"3.49.0"},"publisher-location":"Singapore","reference-count":20,"publisher":"Springer Nature Singapore","isbn-type":[{"value":"9789819770069","type":"print"},{"value":"9789819770076","type":"electronic"}],"license":[{"start":{"date-parts":[[2024,9,22]],"date-time":"2024-09-22T00:00:00Z","timestamp":1726963200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2024,9,22]],"date-time":"2024-09-22T00:00:00Z","timestamp":1726963200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025]]},"DOI":"10.1007\/978-981-97-7007-6_14","type":"book-chapter","created":{"date-parts":[[2024,9,21]],"date-time":"2024-09-21T18:01:43Z","timestamp":1726941703000},"page":"193-207","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":1,"title":["Rehabilitation Training Program Recommendation System Based on ALBERT-LDA Model"],"prefix":"10.1007","author":[{"given":"Xiaozhuang","family":"Zhu","sequence":"first","affiliation":[]},{"given":"Qianqian","family":"Xu","sequence":"additional","affiliation":[]},{"given":"Nuo","family":"Gao","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2024,9,22]]},"reference":[{"key":"14_CR1","doi-asserted-by":"publisher","first-page":"139069","DOI":"10.1109\/ACCESS.2019.2941503","volume":"7","author":"Z Liu","year":"2019","unstructured":"Liu, Z., Zheng, Z., Guo, X., et al.: AttentiveHerb: a novel method for traditional medicine prescription generation. IEEE Access 7, 139069\u2013139085 (2019)","journal-title":"IEEE Access"},{"key":"14_CR2","doi-asserted-by":"crossref","unstructured":"Wang, J.: Cloud-based intelligent self-diagnosis and department recommendation service using Chinese medical BERT (2021)","DOI":"10.1186\/s13677-020-00218-2"},{"key":"14_CR3","unstructured":"Blei, D.M., Ng, A.Y., Jordan, M.I.: Latent Dirichlet allocation. J. Mach. Learn. Res. 3(Jan), 993\u20131022 (2003). Kim, Y.: Convolutional Neural Networks for Sentence Classification. arXiv (2014)"},{"key":"14_CR4","series-title":"LNDECT","doi-asserted-by":"publisher","first-page":"618","DOI":"10.1007\/978-3-319-59463-7_62","volume-title":"EIDWT 2017","author":"Y Zhang","year":"2018","unstructured":"Zhang, Y., Ma, J., Wang, Z., et al.: LF-LDA: a topic model for multi-label classification. In: Barolli, L., Zhang, M., Wang, X.A. (eds.) EIDWT 2017. LNDECT, vol. 6, pp. 618\u2013628. Springer, Cham (2018). https:\/\/doi.org\/10.1007\/978-3-319-59463-7_62"},{"key":"14_CR5","unstructured":"Vaswani, A., Shazeer, N., Parmar, N., et al.: Attention is all you need. In: Advances in Neural Information Processing Systems, vol. 30 (2017)"},{"key":"14_CR6","unstructured":"Devlin, J., Chang, M.W., Lee, K., et al.: BERT: pre-training of deep bidirectional transformers for language understanding. arXiv (2019)"},{"issue":"6","key":"14_CR7","doi-asserted-by":"publisher","first-page":"192","DOI":"10.3390\/fi15060192","volume":"15","author":"KI Roumeliotis","year":"2023","unstructured":"Roumeliotis, K.I., Tselikas, N.D.: ChatGPT and open-AI models: a preliminary review. Future Internet 15(6), 192 (2023)","journal-title":"Future Internet"},{"key":"14_CR8","doi-asserted-by":"crossref","unstructured":"Liu, X., Zheng, Y., Du, Z., et al.: GPT understands, too. AI Open (2023)","DOI":"10.1016\/j.aiopen.2023.08.012"},{"key":"14_CR9","unstructured":"Lan, Z., Chen, M., Goodman, S., et al.: ALBERT: a Lite BERT for self-supervised learning of language representations. arXiv (2020)"},{"key":"14_CR10","doi-asserted-by":"crossref","unstructured":"Yang, N., Jo, J., Jeon, M., et al.: Semantic and explainable research-related recommendation system based on semi-supervised methodology using BERT and LDA models. Expert Syst. Appl. 190, 116209 (2022)","DOI":"10.1016\/j.eswa.2021.116209"},{"key":"14_CR11","doi-asserted-by":"crossref","unstructured":"Choi, H., Kim, J., Joe, S., et al.: Evaluation of BERT and ALBERT sentence embedding performance on downstream NLP tasks. arXiv (2021)","DOI":"10.1109\/ICPR48806.2021.9412102"},{"key":"14_CR12","doi-asserted-by":"publisher","first-page":"1650","DOI":"10.1109\/ACCESS.2023.3347029","volume":"12","author":"G Bo","year":"2024","unstructured":"Bo, G., Shanshan, W., Qing, Z., et al.: Empowering medical data analysis: an advanced deep fusion model for sorting medicine document. IEEE Access 12, 1650\u20131659 (2024)","journal-title":"IEEE Access"},{"key":"14_CR13","doi-asserted-by":"publisher","first-page":"366","DOI":"10.1016\/j.neucom.2019.07.052","volume":"363","author":"B Guo","year":"2019","unstructured":"Guo, B., Zhang, C., Liu, J., et al.: Improving text classification with weighted word embeddings via a multi-channel TextCNN model. Neurocomputing 363, 366\u2013374 (2019)","journal-title":"Neurocomputing"},{"key":"14_CR14","doi-asserted-by":"publisher","first-page":"67542","DOI":"10.1109\/ACCESS.2020.2983568","volume":"8","author":"C Liu","year":"2020","unstructured":"Liu, C., Liu, Y., Yan, Y., et al.: An intrusion detection model with hierarchical attention mechanism. IEEE Access 8, 67542\u201367554 (2020)","journal-title":"IEEE Access"},{"key":"14_CR15","doi-asserted-by":"crossref","unstructured":"Jelodar, H., Wang, Y., Yuan, C., et al.: Latent Dirichlet allocation (LDA) and topic modeling: models, applications, a survey. arXiv (2018)","DOI":"10.1007\/s11042-018-6894-4"},{"key":"14_CR16","doi-asserted-by":"crossref","unstructured":"Narayan, S., Cohen, S.B., Lapata, M.: Don\u2019t give me the details, just the summary! Topic-aware convolutional neural networks for extreme summarization. arXiv (2018)","DOI":"10.18653\/v1\/D18-1206"},{"key":"14_CR17","doi-asserted-by":"crossref","unstructured":"Meister, C., Cotterell, R.: Language model evaluation beyond perplexity. arXiv (2021)","DOI":"10.18653\/v1\/2021.acl-long.414"},{"issue":"1","key":"14_CR18","doi-asserted-by":"publisher","first-page":"201","DOI":"10.1007\/s44196-021-00055-4","volume":"14","author":"W Liu","year":"2021","unstructured":"Liu, W., Pang, J., Li, N., et al.: Research on multi-label text classification method based on tALBERT-CNN. Int. J. Comput. Intell. Syst. 14(1), 201 (2021)","journal-title":"Int. J. Comput. Intell. Syst."},{"key":"14_CR19","unstructured":"Zhang, Z., Ni, W., Liu, J., et al.: A study on Japanese text multi-classification with ALBERT-TextCNN"},{"key":"14_CR20","doi-asserted-by":"publisher","unstructured":"Yang, S., Lu, H.: Artificial Intelligence and Robotics, vol. 1701, pp. 257\u2013266. Springer, Singapore (2022). https:\/\/doi.org\/10.1007\/978-3-319-69877-9","DOI":"10.1007\/978-3-319-69877-9"}],"container-title":["Communications in Computer and Information Science","Neural Computing for Advanced Applications"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/978-981-97-7007-6_14","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,9,21]],"date-time":"2024-09-21T18:05:02Z","timestamp":1726941902000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/978-981-97-7007-6_14"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,9,22]]},"ISBN":["9789819770069","9789819770076"],"references-count":20,"URL":"https:\/\/doi.org\/10.1007\/978-981-97-7007-6_14","relation":{},"ISSN":["1865-0929","1865-0937"],"issn-type":[{"value":"1865-0929","type":"print"},{"value":"1865-0937","type":"electronic"}],"subject":[],"published":{"date-parts":[[2024,9,22]]},"assertion":[{"value":"22 September 2024","order":1,"name":"first_online","label":"First Online","group":{"name":"ChapterHistory","label":"Chapter History"}},{"value":"NCAA","order":1,"name":"conference_acronym","label":"Conference Acronym","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"International Conference on Neural Computing for Advanced Applications","order":2,"name":"conference_name","label":"Conference Name","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Guilin","order":3,"name":"conference_city","label":"Conference City","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"China","order":4,"name":"conference_country","label":"Conference Country","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"2024","order":5,"name":"conference_year","label":"Conference Year","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"5 July 2024","order":7,"name":"conference_start_date","label":"Conference Start Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"7 July 2024","order":8,"name":"conference_end_date","label":"Conference End Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"5","order":9,"name":"conference_number","label":"Conference Number","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"ncaa2024","order":10,"name":"conference_id","label":"Conference ID","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"https:\/\/aaci.org.hk\/ncaa2024\/","order":11,"name":"conference_url","label":"Conference URL","group":{"name":"ConferenceInfo","label":"Conference Information"}}]}}