{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,2,21]],"date-time":"2025-02-21T20:22:11Z","timestamp":1740169331631,"version":"3.37.3"},"reference-count":44,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2023,1,1]],"date-time":"2023-01-01T00:00:00Z","timestamp":1672531200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/legalcode"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62107021"],"award-info":[{"award-number":["62107021"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Knowledge Innovation Program of Wuhan-Basic Research"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Access"],"published-print":{"date-parts":[[2023]]},"DOI":"10.1109\/access.2023.3249214","type":"journal-article","created":{"date-parts":[[2023,3,2]],"date-time":"2023-03-02T18:25:45Z","timestamp":1677781545000},"page":"20298-20308","source":"Crossref","is-referenced-by-count":1,"title":["Accelerating Semi-Supervised Text Classification by K-Way Projecting Networks"],"prefix":"10.1109","volume":"11","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-2315-4972","authenticated-orcid":false,"given":"Qiyuan","family":"Chen","sequence":"first","affiliation":[{"name":"School of Mathematics and Statistics, Central China Normal University, Wuhan, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-7795-8522","authenticated-orcid":false,"given":"Haitong","family":"Yang","sequence":"additional","affiliation":[{"name":"School of Computer Science, Central China Normal University, Wuhan, China"}]},{"given":"Pai","family":"Peng","sequence":"additional","affiliation":[{"name":"School of Mathematics and Statistics, Central China Normal University, Wuhan, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-9191-9367","authenticated-orcid":false,"given":"Le","family":"Li","sequence":"additional","affiliation":[{"name":"School of Mathematics and Statistics, Central China Normal University, Wuhan, China"}]}],"member":"263","reference":[{"key":"ref1","first-page":"4171","article-title":"BERT: Pre-training of deep bidirectional transformers for language understanding","volume-title":"Proc. Conf. North Amer. Chapter Assoc. Comput. Linguistics, Hum. Lang. Technol.","volume":"1","author":"Devlin"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.48550\/ARXIV.1907.11692"},{"key":"ref3","first-page":"1","article-title":"XLNet: Generalized autoregressive pretraining for language understanding","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"32","author":"Yang"},{"key":"ref4","first-page":"344","article-title":"Audio ALBERT: A lite bert for self-supervised learning of audio representation","volume-title":"Proc. IEEE Spoken Lang. Technol. Workshop (SLT)","author":"Lan"},{"article-title":"Improving language understanding by generative pre-training","year":"2018","author":"Radford","key":"ref5"},{"key":"ref6","first-page":"1","article-title":"MixMatch: A holistic approach to semi-supervised learning","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"32","author":"Berthelot"},{"article-title":"RemixMatch: Semi-supervised learning with distribution matching and augmentation anchoring","volume-title":"Proc. Int. Conf. Learn. Represent.","author":"Berthelot","key":"ref7"},{"key":"ref8","first-page":"596","article-title":"FixMatch: Simplifying semi-supervised learning with consistency and confidence","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"33","author":"Sohn"},{"key":"ref9","first-page":"18408","article-title":"FlexMatch: Boosting semi-supervised learning with curriculum pseudo labeling","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"34","author":"Zhang"},{"key":"ref10","first-page":"2147","article-title":"MixText: Linguistically-informed interpolation of hidden space for semi-supervised text classification","volume-title":"Proc. 58th Annu. Meeting Assoc. Comput. Linguistics","author":"Chen"},{"key":"ref11","first-page":"6256","article-title":"Unsupervised data augmentation for consistency training","volume-title":"Proc. NIPS","volume":"33","author":"Xie"},{"key":"ref12","first-page":"5044","article-title":"Semi-supervised text classification with balanced deep representation distributions","volume-title":"Proc. 59th Annu. Meeting Assoc. Comput. Linguistics 11th Int. Joint Conf. Natural Lang. Process.","author":"Li"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1145\/3437963.3441814"},{"key":"ref14","first-page":"4163","article-title":"TinyBERT: Distilling BERT for natural language understanding","volume-title":"Findings of the Association for Computational Linguistics","author":"Jiao","year":"2020"},{"key":"ref15","article-title":"DistilBERT, a distilled version of BERT: Smaller, faster, cheaper and lighter","author":"Sanh","year":"2019","journal-title":"arXiv:1910.01108"},{"key":"ref16","article-title":"Distilling task-specific knowledge from BERT into simple neural networks","author":"Tang","year":"2019","journal-title":"arXiv:1903.12136"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.48550\/arXiv.1503.02531"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.acl-long.279"},{"key":"ref19","first-page":"1681","article-title":"Deep unordered composition rivals syntactic methods for text classification","volume-title":"Proc. 53rd Annu. Meeting Assoc. Comput. Linguistics 7th Int. Joint Conf. Natural Lang. Process.","author":"Iyyer"},{"key":"ref20","first-page":"1532","article-title":"GloVe: Global vectors for word representation","volume-title":"Proc. Conf. Empirical Methods Natural Lang. Process.","author":"Pennington"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.48550\/ARXIV.1706.03762"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1356"},{"key":"ref23","first-page":"1073","article-title":"Linguistic knowledge and transferability of contextual representations","volume-title":"Proc. Conf. North","author":"Liu"},{"key":"ref24","first-page":"7222","article-title":"Probing pretrained language models for lexical semantics","volume-title":"Proc. Conf. Empirical Methods Natural Lang. Process. (EMNLP)","author":"Vuli\u0107"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1186\/s40537-022-00564-9"},{"key":"ref26","first-page":"5530","article-title":"Unsupervised fine-tuning for text clustering","volume-title":"Proc. 28th Int. Conf. Comput. Linguistics","author":"Huang"},{"key":"ref27","article-title":"Self-supervised document clustering based on BERT with data augment","author":"Shi","year":"2020","journal-title":"arXiv:2011.08523"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2021.naacl-industry.39"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1145\/2668332.2668347"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-20893-6_39"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/TMI.2020.2996645"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.3115\/v1\/P15-1161"},{"key":"ref33","first-page":"1914","article-title":"Semi-supervised sequence modeling with cross-view training","volume-title":"Proc. Conf. Empirical Methods Natural Lang. Process.","author":"Clark"},{"key":"ref34","first-page":"5880","article-title":"Variational pretraining for semi-supervised text classification","volume-title":"Proc. 57th Annu. Meeting Assoc. Comput. Linguistics","author":"Gururangan"},{"article-title":"Pseudo-label: The simple and efficient semi-supervised learning method for deep neural networks","volume-title":"Proc. ICML Workshop, Challenges Represent. Learn. (WREPL)","author":"Lee","key":"ref35"},{"article-title":"Adversarial training methods for semi-supervised text classification","volume-title":"Proc. Int. Conf. Learn. Represent.","author":"Miyato","key":"ref36"},{"key":"ref37","first-page":"4323","article-title":"Patient knowledge distillation for BERT model compression","volume-title":"Proc. Conf. Empirical Methods Natural Lang. Process. 9th Int. Joint Conf. Natural Lang. Process. (EMNLP-IJCNLP)","author":"Sun"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.naacl-main.169"},{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P18-1041"},{"key":"ref40","first-page":"6382","article-title":"EDA: Easy data augmentation techniques for boosting performance on text classification tasks","volume-title":"Proc. Conf. Empirical Methods Natural Lang. Process. 9th Int. Joint Conf. Natural Lang. Process. (EMNLP-IJCNLP)","author":"Wei"},{"key":"ref41","first-page":"142","article-title":"Learning word vectors for sentiment analysis","volume-title":"Proc. 49th Annu. Meet. Assoc. Comput. Linguistics, Hum. Lang. Technol.","author":"Maas"},{"article-title":"Importance of semantic representation: Dataless classification","volume-title":"Proc. AAAI","author":"Chang","key":"ref42"},{"key":"ref43","first-page":"1813","article-title":"Dbpedia: A multilingual cross-domain knowledge base","volume-title":"Proc. LREC","author":"Mendes"},{"key":"ref44","article-title":"Character-level convolutional networks for text classification","volume-title":"Proc. Adv. Neural Inf. Process. Syst. (NIPS)","volume":"28","author":"Zhang"}],"container-title":["IEEE Access"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/6287639\/10005208\/10054053.pdf?arnumber=10054053","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,2,13]],"date-time":"2024-02-13T19:08:01Z","timestamp":1707851281000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10054053\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2023]]},"references-count":44,"URL":"https:\/\/doi.org\/10.1109\/access.2023.3249214","relation":{},"ISSN":["2169-3536"],"issn-type":[{"type":"electronic","value":"2169-3536"}],"subject":[],"published":{"date-parts":[[2023]]}}}