{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,19]],"date-time":"2026-03-19T15:25:08Z","timestamp":1773933908744,"version":"3.50.1"},"reference-count":17,"publisher":"Springer Science and Business Media LLC","issue":"1","license":[{"start":{"date-parts":[[2025,11,19]],"date-time":"2025-11-19T00:00:00Z","timestamp":1763510400000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2025,11,19]],"date-time":"2025-11-19T00:00:00Z","timestamp":1763510400000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["Cluster Comput"],"published-print":{"date-parts":[[2026,2]]},"DOI":"10.1007\/s10586-025-05865-1","type":"journal-article","created":{"date-parts":[[2025,11,19]],"date-time":"2025-11-19T15:59:53Z","timestamp":1763567993000},"update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":3,"title":["Parameter-efficient fine-tuning of LLaMA models for financial sentiment classification"],"prefix":"10.1007","volume":"29","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-8427-9971","authenticated-orcid":false,"given":"Seda BAYAT","family":"TOKSOZ","sequence":"first","affiliation":[]},{"given":"G\u00fcltekin","family":"ISIK","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2025,11,19]]},"reference":[{"key":"5865_CR1","unstructured":"Araci, D.: FinBERT: Financial sentiment analysis with pre-trained language models. arXiv preprint arXiv:1908.10063. (2019)"},{"key":"5865_CR2","doi-asserted-by":"crossref","unstructured":"Ben Zaken, E., Goldberg, Y., Ravfogel, S.: BitFit: Simple parameter-efficient fine-tuning for transformer-based masked language-models. In Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers) (pp. 1\u20139). (2022)","DOI":"10.18653\/v1\/2022.acl-short.1"},{"key":"5865_CR3","unstructured":"Chen, T., Xu, B., Zhang, C., Guestrin, C.: Training deep nets with sublinear memory cost. arXiv preprint arXiv:1604.06174 (2016)."},{"key":"5865_CR4","unstructured":"Choe, J., Noh, K., Kim, N., Ahn, S., Jung, W.: Exploring the impact of corpus diversity on financial pretrained language models. In Proceedings of the 4th Financial Narrative Processing Workshop (pp. 123\u2013135). (2023)"},{"key":"5865_CR5","doi-asserted-by":"crossref","unstructured":"Dettmers, T., Pagnoni, A., Holtzman, A., Zettlemoyer, L.: QLoRA: Efficient finetuning of quantized LLMs. In Advances in Neural Information Processing Systems (Vol. 36). (2023)","DOI":"10.52202\/075280-0441"},{"issue":"9","key":"5865_CR6","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1145\/3649451","volume":"56","author":"K Du","year":"2024","unstructured":"Du, K., Xing, F., Mao, R., Cambria, E.: Financial sentiment analysis: Techniques and applications. ACM Comput. Surveys. 56(9), 1\u201342 (2024)","journal-title":"ACM Comput. Surveys"},{"key":"5865_CR7","doi-asserted-by":"crossref","unstructured":"Gilbert, C.H.E.: VADER: A parsimonious rule-based model for sentiment analysis of social media text. In Proceedings of the International AAAI Conference on Web and Social Media. 
8(1), 216\u2013225 (2014)","DOI":"10.1609\/icwsm.v8i1.14550"},{"key":"5865_CR8","unstructured":"Houlsby, N., Giurgiu, A., Jastrzebski, S., Morrone, B., De Laroussilhe, Q., Gesmundo, A., Gelly, S.: Parameter-efficient transfer learning for NLP. In International Conference on Machine Learning (pp. 2790\u20132799). (2019)"},{"key":"5865_CR9","unstructured":"Hu, E.J., Shen, Y., Wallis, P., Allen-Zhu, Z., Li, Y., Wang, S., Chen, W.: LoRA: Low-rank adaptation of large language models. In International Conference on Learning Representations. (2022)"},{"key":"5865_CR10","doi-asserted-by":"crossref","unstructured":"Kim, W., Sp\u00f6rer, J., Lee, C.L., Handschuh, S.: Is small really beautiful for central bank communication? Evaluating language models for finance. In Proceedings of the 5th ACM International Conference on AI in Finance (pp. 626\u2013633). (2024)","DOI":"10.1145\/3677052.3698675"},{"key":"5865_CR11","doi-asserted-by":"crossref","unstructured":"Li, X.L., Liang, P.: Prefix-tuning: Optimizing continuous prompts for generation. In Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics (pp. 4582\u20134597). (2021)","DOI":"10.18653\/v1\/2021.acl-long.353"},{"issue":"4","key":"5865_CR12","doi-asserted-by":"publisher","first-page":"782","DOI":"10.1002\/asi.23062","volume":"65","author":"P Malo","year":"2014","unstructured":"Malo, P., Sinha, A., Korhonen, P., Wallenius, J., Takala, P.: Good debt or bad debt: Detecting semantic orientations in economic texts. J. Association Inform. Sci. Technol. 65(4), 782\u2013796 (2014)","journal-title":"J. Association Inform. Sci. Technol."},{"key":"5865_CR13","unstructured":"Micikevicius, P., Narang, S., Alben, J., Diamos, G., Elsen, E., Garcia, D., Wu, H.: Mixed precision training. In International Conference on Learning Representations. (2018)"},{"key":"5865_CR14","doi-asserted-by":"crossref","unstructured":"Rajbhandari, S., Rasley, J., Ruwase, O., He, Y.: ZeRO: Memory optimizations toward training trillion parameter models. In Proceedings of the International Conference for High Performance Computing, Networking, Storage and Analysis (pp. 1\u201316). (2020)","DOI":"10.1109\/SC41405.2020.00024"},{"key":"5865_CR15","unstructured":"Ren, J., Rajbhandari, S., Aminabadi, R.Y., Ruwase, O., Yang, S., Zhang, M., He, Y.: ZeRO-Offload: Democratizing billion-scale model training. In USENIX Annual Technical Conference (pp. 551\u2013564). (2021)"},{"key":"5865_CR16","unstructured":"Touvron, H., Martin, L., Stone, K., Albert, P., Almahairi, A., Babaei, Y., Scialom, T.: Llama 2: Open foundation and fine-tuned chat models. arXiv preprint arXiv:2307.09288. (2023)"},{"key":"5865_CR17","unstructured":"Xu, Y., Xie, L., Gu, X., Chen, X., Chang, H., Zhang, H., Tian, Q.: QA-LoRA: Quantization-aware low-rank adaptation of large language models. arXiv preprint arXiv:2309.14717. 
(2023)"}],"container-title":["Cluster Computing"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s10586-025-05865-1.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s10586-025-05865-1","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s10586-025-05865-1.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,3,19]],"date-time":"2026-03-19T13:07:29Z","timestamp":1773925649000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s10586-025-05865-1"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,11,19]]},"references-count":17,"journal-issue":{"issue":"1","published-print":{"date-parts":[[2026,2]]}},"alternative-id":["5865"],"URL":"https:\/\/doi.org\/10.1007\/s10586-025-05865-1","relation":{},"ISSN":["1386-7857","1573-7543"],"issn-type":[{"value":"1386-7857","type":"print"},{"value":"1573-7543","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025,11,19]]},"assertion":[{"value":"9 March 2025","order":1,"name":"received","label":"Received","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"27 October 2025","order":2,"name":"revised","label":"Revised","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"6 November 2025","order":3,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"19 November 2025","order":4,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"The authors declare no competing interests.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Competing interests"}},{"value":"This study does not involve human subjects, animal subjects, or sensitive personal data. The research was conducted using publicly available datasets and computational resources. Therefore, ethics approval was not applicable for this research.","order":3,"name":"Ethics","group":{"name":"EthicsHeading","label":"Ethical approval"}}],"article-number":"41"}}