{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,1,10]],"date-time":"2026-01-10T19:28:51Z","timestamp":1768073331677,"version":"3.49.0"},"reference-count":35,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"5","license":[{"start":{"date-parts":[[2025,5,1]],"date-time":"2025-05-01T00:00:00Z","timestamp":1746057600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2025,5,1]],"date-time":"2025-05-01T00:00:00Z","timestamp":1746057600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,5,1]],"date-time":"2025-05-01T00:00:00Z","timestamp":1746057600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"name":"State Street Zhejiang University Technology Center"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Knowl. Data Eng."],"published-print":{"date-parts":[[2025,5]]},"DOI":"10.1109\/tkde.2025.3543422","type":"journal-article","created":{"date-parts":[[2025,2,18]],"date-time":"2025-02-18T18:26:02Z","timestamp":1739903162000},"page":"2253-2265","source":"Crossref","is-referenced-by-count":1,"title":["Build a Good Human-Free Prompt Tuning: Jointly Pre-Trained Template and Verbalizer for Few-Shot Classification"],"prefix":"10.1109","volume":"37","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-8341-1467","authenticated-orcid":false,"given":"Mouxiang","family":"Chen","sequence":"first","affiliation":[{"name":"College of Computer Science and Technology, Zhejiang University, Hangzhou, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-2353-5406","authenticated-orcid":false,"given":"Han","family":"Fu","sequence":"additional","affiliation":[{"name":"College of Computer Science and Technology, Zhejiang University, Hangzhou, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-6934-2354","authenticated-orcid":false,"given":"Chenghao","family":"Liu","sequence":"additional","affiliation":[{"name":"Salesforce Research Asia, Singapore"}]},{"given":"Xiaoyun Joy","family":"Wang","sequence":"additional","affiliation":[{"name":"State Street Technology (Zhejiang) Ltd., Hangzhou, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9381-7359","authenticated-orcid":false,"given":"Zhuo","family":"Li","sequence":"additional","affiliation":[{"name":"State Street Technology (Zhejiang) Ltd., Hangzhou, China"}]},{"given":"Jianling","family":"Sun","sequence":"additional","affiliation":[{"name":"College of Computer Science and Technology, Zhejiang University, Hangzhou, China"}]}],"member":"263","reference":[{"key":"ref1","first-page":"4171","article-title":"BERT: Pre-training of deep bidirectional transformers for language understanding","volume-title":"Proc. Conf. North Amer. Chapter Assoc. Comput. Linguistics-Hum. Lang. Technol.","author":"Devlin"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.3390\/info10040150"},{"issue":"140","key":"ref3","first-page":"1","article-title":"Exploring the limits of transfer learning with a unified text-to-text transformer","volume":"21","author":"Raffel","year":"2020","journal-title":"J. Mach. Learn. Res."},{"key":"ref4","first-page":"15\u2009787","article-title":"Flex: Unifying evaluation for few-shot NLP","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Bragg"},{"key":"ref5","first-page":"255","article-title":"Exploiting cloze-questions for few-shot text classification and natural language inference","volume-title":"Proc. 16th Conf. Eur. Chapter Assoc. Comput. Linguistics","author":"Schick"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1016\/j.aiopen.2023.08.012"},{"key":"ref7","first-page":"3045","article-title":"The power of scale for parameter-efficient prompt tuning","volume-title":"Proc. Conf. Empirical Methods Natural Lang. Process.","author":"Lester"},{"key":"ref8","first-page":"4582","article-title":"Prefix-tuning: Optimizing continuous prompts for generation","volume-title":"Proc. 59th Annu. Meeting Assoc. Comput. Linguistics-11th Int. Joint Conf. Natural Lang. Process.","author":"Li"},{"key":"ref9","first-page":"7014","article-title":"Prototypical verbalizer for prompt-based few-shot tuning","volume-title":"Proc. 60th Annu. Meeting Assoc. Comput. Linguistics","author":"Cui"},{"key":"ref10","first-page":"4921","article-title":"WARP: Word-level adversarial ReProgramming","volume-title":"Proc. 59th Annu. Meeting Assoc. Comput. Linguistics-11th Int. Joint Conf. Natural Lang. Process.","author":"Hambardzumyan"},{"key":"ref11","first-page":"8410","article-title":"PPT: Pre-trained prompt tuning for few-shot learning","volume-title":"Proc. 60th Annu. Meeting Assoc. Comput. Linguistics","author":"Gu"},{"key":"ref12","first-page":"5039","article-title":"SPoT: Better frozen model adaptation through soft prompt transfer","volume-title":"Proc. 60th Annu. Meeting Assoc. Comput. Linguistics","author":"Vu"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.ijcnlp-short.8"},{"key":"ref14","article-title":"Representation learning with contrastive predictive coding","author":"Oord","year":"2018"},{"key":"ref15","first-page":"649","article-title":"Character-level convolutional networks for text classification","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Zhang"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.3233\/SW-140134"},{"key":"ref17","first-page":"421","article-title":"On the robustness of authorship attribution based on character N-gram features","volume":"21","author":"Stamatatos","year":"2013","journal-title":"J. Law Policy"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/E17-1107"},{"key":"ref19","article-title":"Pointer sentinel mixture models","author":"Merity","year":"2016"},{"key":"ref20","first-page":"105","article-title":"OpenPrompt: An open-source framework for prompt-learning","volume-title":"Proc. 60th Annu. Meeting Assoc. Comput. Linguistics","author":"Ding"},{"key":"ref21","article-title":"RoBERTa: A robustly optimized BERT pretraining approach","author":"Liu","year":"2019"},{"key":"ref22","first-page":"2225","article-title":"Knowledgeable prompt-tuning: Incorporating knowledge into prompt verbalizer for text classification","volume-title":"Proc. 60th Annu. Meeting Assoc. Comput. Linguistics","author":"Hu"},{"key":"ref23","first-page":"30","article-title":"Prompt-based zero-shot text classification with conceptual knowledge","volume-title":"Proc. 61st Annu. Meeting Assoc. Comput. Linguistics","author":"Wang"},{"key":"ref24","first-page":"1112","article-title":"A broad-coverage challenge corpus for sentence understanding through inference","volume-title":"Proc. Conf. North Amer. Chapter Assoc. Comput. Linguistics-Hum. Lang. Technol.","author":"Williams"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2021.emnlp-main.552"},{"key":"ref26","first-page":"1","article-title":"LoRA: Low-rank adaptation of large language models","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Hu"},{"key":"ref27","article-title":"PEFT: State-of-the-art parameter-efficient fine-tuning methods","author":"Mangrulkar","year":"2022"},{"key":"ref28","first-page":"1877","article-title":"Language models are few-shot learners","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Brown"},{"key":"ref29","first-page":"3816","article-title":"Making pre-trained language models better few-shot learners","volume-title":"Proc. 59th Annu. Meeting Assoc. Comput. Linguistics-11th Int. Joint Conf. Natural Lang. Process.","author":"Gao"},{"key":"ref30","first-page":"2463","article-title":"Language models as knowledge bases?","volume-title":"Proc. Conf. Empirical Methods Natural Lang. Process.-9th Int. Joint Conf. Natural Lang. Process.","author":"Petroni"},{"key":"ref31","first-page":"2339","article-title":"It\u2019s not just size that matters: Small language models are also few-shot learners","volume-title":"Proc. Conf. North Amer. Chapter Assoc. Comput. Linguistics-Hum. Lang. Technol.","author":"Schick"},{"key":"ref32","first-page":"1","article-title":"Differentiable prompt makes pre-trained language models better few-shot learners","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Zhang"},{"key":"ref33","first-page":"5569","article-title":"Automatically identifying words that can serve as labels for few-shot text classification","volume-title":"Proc. 28th Int. Conf. Comput. Linguistics","author":"Schick"},{"key":"ref34","first-page":"4222","article-title":"AutoPrompt: Eliciting knowledge from language models with automatically generated prompts","volume-title":"Proc. Conf. Empirical Methods Natural Lang. Process.","author":"Shin"},{"key":"ref35","first-page":"2918","article-title":"Hierarchical verbalizer for few-shot hierarchical text classification","volume-title":"Proc. 61st Annu. Meeting Assoc. Comput. Linguistics","author":"Ji"}],"container-title":["IEEE Transactions on Knowledge and Data Engineering"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/69\/10948402\/10891939.pdf?arnumber=10891939","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,4,7]],"date-time":"2025-04-07T03:24:52Z","timestamp":1743996292000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10891939\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,5]]},"references-count":35,"journal-issue":{"issue":"5"},"URL":"https:\/\/doi.org\/10.1109\/tkde.2025.3543422","relation":{},"ISSN":["1041-4347","1558-2191","2326-3865"],"issn-type":[{"value":"1041-4347","type":"print"},{"value":"1558-2191","type":"electronic"},{"value":"2326-3865","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025,5]]}}}