{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,1,17]],"date-time":"2026-01-17T07:40:22Z","timestamp":1768635622510,"version":"3.49.0"},"reference-count":31,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2026,1,1]],"date-time":"2026-01-01T00:00:00Z","timestamp":1767225600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/legalcode"}],"funder":[{"name":"NextGenerationEU framework through the Project \u201cDEEPWAVE\u201d at the University of Zagreb, Faculty of Electrical Engineering and Computing"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Access"],"published-print":{"date-parts":[[2026]]},"DOI":"10.1109\/access.2025.3648050","type":"journal-article","created":{"date-parts":[[2025,12,24]],"date-time":"2025-12-24T18:46:53Z","timestamp":1766602013000},"page":"5643-5659","source":"Crossref","is-referenced-by-count":0,"title":["Distributed Modularization of Thought: Lets Small Rival Large LMs"],"prefix":"10.1109","volume":"14","author":[{"ORCID":"https:\/\/orcid.org\/0009-0006-0315-2589","authenticated-orcid":false,"given":"Matej","family":"Zubi\u0107","sequence":"first","affiliation":[{"name":"Faculty of Electrical Engineering and Computing, University of Zagreb, Zagreb, Croatia"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9969-1849","authenticated-orcid":false,"given":"Dario","family":"Bojanjac","sequence":"additional","affiliation":[{"name":"Faculty of Electrical Engineering and Computing, University of Zagreb, Zagreb, Croatia"}]}],"member":"263","reference":[{"key":"ref1","article-title":"Attention is all you need","author":"Vaswani","year":"2017","journal-title":"arXiv:1706.03762"},{"key":"ref2","article-title":"Language models are few-shot learners","author":"Brown","year":"2020","journal-title":"arXiv:2005.14165"},{"key":"ref3","article-title":"Evaluating large language models trained on code","author":"Chen","year":"2021","journal-title":"arXiv:2107.03374"},{"key":"ref4","article-title":"Program synthesis with large language models","author":"Austin","year":"2021","journal-title":"arXiv:2108.07732"},{"key":"ref5","article-title":"Code llama: Open foundation models for code","author":"Rozi\u00e8re","year":"2023","journal-title":"arXiv:2308.12950"},{"key":"ref6","article-title":"Qwen2.5-coder technical report","volume-title":"arXiv:2409.12186","author":"Hui","year":"2024"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1145\/3719664"},{"key":"ref8","article-title":"GPT-4 technical report","volume-title":"arXiv:2303.08774","author":"Achiam","year":"2023"},{"key":"ref9","article-title":"Quantizing large language models for code generation: A differentiated replication","author":"Giagnorio","year":"2025","journal-title":"arXiv:2503.07103"},{"key":"ref10","article-title":"Transforming software development: Evaluating the efficiency and challenges of GitHub copilot in real-world projects","author":"Pandey","year":"2024","journal-title":"arXiv:2406.17910"},{"key":"ref11","article-title":"The ultimate guide to fine-tuning LLMs from basics to breakthroughs: An exhaustive review of technologies, research, best practices, applied research challenges and opportunities","author":"Parthasarathy","year":"2024","journal-title":"arXiv:2408.13296"},{"key":"ref12","article-title":"A comparative study on code generation with 
transformers","author":"Das","year":"2024","journal-title":"arXiv:2412.05749"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.48550\/arXiv.1503.02531"},{"key":"ref14","article-title":"QLoRA: Efficient finetuning of quantized LLMs","author":"Dettmers","year":"2023","journal-title":"arXiv:2305.14314"},{"key":"ref15","article-title":"LoRA: Low-rank adaptation of large language models","author":"Hu","year":"2021","journal-title":"arXiv:2106.09685"},{"key":"ref16","article-title":"BigCodeBench: Benchmarking code generation with diverse function calls and complex instructions","author":"Zhuo","year":"2024","journal-title":"arXiv:2406.15877"},{"key":"ref17","article-title":"LLM4Code: A survey of large language models\u00c2 for code","author":"Zhu","year":"2023","journal-title":"arXiv:2306.08997"},{"key":"ref18","article-title":"Improving language understanding by generative pre-training","author":"Radford","year":"2018","journal-title":"arXiv:1801.10198"},{"key":"ref19","article-title":"Language models are unsupervised multitask learners","author":"Radford","year":"2019"},{"key":"ref20","volume-title":"Introducing O3 and O4 Mini","year":"2025"},{"key":"ref21","volume-title":"Introducing Llama 4 Herd","year":"2025"},{"key":"ref22","article-title":"CodeGemma: Open code models based on gemma","author":"Team","year":"2024","journal-title":"arXiv:2406.11409"},{"key":"ref23","article-title":"Prefix-tuning: Optimizing continuous prompts for generation","author":"Liu","year":"2021","journal-title":"arXiv:2101.00190"},{"key":"ref24","article-title":"Retrieval-augmented generation for knowledge-intensive NLP tasks","author":"Lewis","year":"2020","journal-title":"arXiv:2005.11401"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2025.findings-naacl.176"},{"key":"ref26","article-title":"Training language models to follow instructions with human feedback","author":"Ouyang","year":"2022","journal-title":"arXiv:2203.02155"},{"key":"ref27","article-title":"MoT: Modularization-of-thought prompting for effective code generation","author":"Wu","year":"2025","journal-title":"arXiv:2503.12483"},{"key":"ref28","article-title":"SVFT: Parameter-efficient fine-tuning with singular vectors","author":"Lingam","year":"2024","journal-title":"arXiv:2405.19597"},{"key":"ref29","article-title":"SSVD: Structured SVD for parameter-efficient fine-tuning and benchmarking under domain shift in ASR","author":"Wang","year":"2025","journal-title":"arXiv:2509.02830"},{"key":"ref30","article-title":"KaSA: Knowledge-aware singular-value adaptation of foundation models","author":"Wang","year":"2024","journal-title":"arXiv:2406.12345"},{"key":"ref31","volume-title":"PEFT Welcomes New Merging Methods","year":"2024"}],"container-title":["IEEE Access"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/6287639\/11323511\/11314500.pdf?arnumber=11314500","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,1,16]],"date-time":"2026-01-16T20:51:05Z","timestamp":1768596665000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11314500\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2026]]},"references-count":31,"URL":"https:\/\/doi.org\/10.1109\/access.2025.3648050","relation":{},"ISSN":["2169-3536"],"issn-type":[{"value":"2169-3536","type":"electronic"}],"subject":[],"published":{"date-parts":[[2026]]}}}