{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,4,5]],"date-time":"2025-04-05T09:40:02Z","timestamp":1743846002222,"version":"3.40.3"},"reference-count":18,"publisher":"IEEE","license":[{"start":{"date-parts":[[2024,10,27]],"date-time":"2024-10-27T00:00:00Z","timestamp":1729987200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,10,27]],"date-time":"2024-10-27T00:00:00Z","timestamp":1729987200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024,10,27]]},"DOI":"10.1109\/ieeeconf60004.2024.10942686","type":"proceedings-article","created":{"date-parts":[[2025,4,4]],"date-time":"2025-04-04T18:20:20Z","timestamp":1743790820000},"page":"1248-1252","source":"Crossref","is-referenced-by-count":0,"title":["Parameter Efficient Fine-tuning of Transformer-Based Language Models Using Dataset Pruning"],"prefix":"10.1109","author":[{"given":"Sayed Mohammadreza","family":"Tayaranian Hosseini","sequence":"first","affiliation":[{"name":"McGill University Montreal,Department of Electrical and Computer Engineering,Quebec,Canada"}]},{"given":"Seyyed Hasan","family":"Mozafari","sequence":"additional","affiliation":[{"name":"McGill University Montreal,Department of Electrical and Computer Engineering,Quebec,Canada"}]},{"given":"James","family":"Clark","sequence":"additional","affiliation":[{"name":"McGill University Montreal,Department of Electrical and Computer Engineering,Quebec,Canada"}]},{"given":"Brett","family":"Meyer","sequence":"additional","affiliation":[{"name":"McGill University Montreal,Department of Electrical and Computer Engineering,Quebec,Canada"}]},{"given":"Warren","family":"Gross","sequence":"additional","affiliation":[{"name":"McGill University Montreal,Department of Electrical and Computer Engineering,Quebec,Canada"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1145\/3641289"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.48550\/ARXIV.1706.03762"},{"volume-title":"Llama: Open and efficient foundation language models","year":"2023","author":"Touvron","key":"ref3"},{"key":"ref4","article-title":"Scaling laws for neural language models","author":"Kaplan","year":"2020","journal-title":"arXiv preprint"},{"volume-title":"Opt: Open pre-trained transformer language models","year":"2022","author":"Zhang","key":"ref5"},{"key":"ref6","first-page":"4171","article-title":"BERT: Pre-training of deep bidirectional transformers for language understanding","volume-title":"Proceedings of the 2019 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long and Short Papers)","author":"Devlin"},{"article-title":"LoRA: Low-rank adaptation of large language models","volume-title":"International Conference on Learning Representations","author":"Hu","key":"ref7"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/ISCAS48785.2022.9937567"},{"key":"ref9","first-page":"7319","article-title":"Intrinsic dimensionality explains the effectiveness of language model fine-tuning","volume-title":"Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing (Volume 1: Long Papers)","author":"Aghajanyan"},{"key":"ref10","first-page":"2790","article-title":"Parameter-efficient transfer learning for NLP","volume-title":"Proceedings of the 36th International Conference on Machine Learning, ser. Proceedings of Machine Learning Research","volume":"97","author":"Houlsby"},{"volume-title":"Automatic pruning of fine-tuning datasets for transformer-based language models","year":"2024","author":"Tayaranian","key":"ref11"},{"key":"ref12","first-page":"12991","article-title":"Lst: Ladder side-tuning for parameter and memory efficient transfer learning","volume-title":"Advances in Neural Information Processing Systems","volume":"35","author":"Sung","year":"2022"},{"volume-title":"RoBERTa: A robustly optimized BERT pretraining approach","year":"2019","author":"Liu","key":"ref13"},{"key":"ref14","doi-asserted-by":"crossref","DOI":"10.18653\/v1\/W18-5446","article-title":"GLUE: A multi-task benchmark and analysis platform for natural language understanding","volume-title":"International Conference on Learning Representations","author":"Wang"},{"key":"ref15","first-page":"38","article-title":"Transformers: State-of-the-art natural language processing","volume-title":"Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations","author":"Wolf"},{"volume-title":"Peft: State-of-the-art parameter-efficient fine-tuning methods","year":"2022","author":"Mangrulkar","key":"ref16"},{"key":"ref17","article-title":"LoRA learns less and forgets less","author":"Biderman","year":"2024","journal-title":"Transactions on Machine Learning Research"},{"key":"ref18","first-page":"142","article-title":"Learning word vectors for sentiment analysis","volume-title":"Proceedings of the 49th Annual Meeting of the Association for Computational Linguistics: Human Language Technologies","author":"Maas"}],"event":{"name":"2024 58th Asilomar Conference on Signals, Systems, and Computers","start":{"date-parts":[[2024,10,27]]},"location":"Pacific Grove, CA, USA","end":{"date-parts":[[2024,10,30]]}},"container-title":["2024 58th Asilomar Conference on Signals, Systems, and Computers"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/10942562\/10942606\/10942686.pdf?arnumber=10942686","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,4,5]],"date-time":"2025-04-05T09:01:32Z","timestamp":1743843692000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10942686\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,10,27]]},"references-count":18,"URL":"https:\/\/doi.org\/10.1109\/ieeeconf60004.2024.10942686","relation":{},"subject":[],"published":{"date-parts":[[2024,10,27]]}}}