{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,11]],"date-time":"2026-03-11T17:43:48Z","timestamp":1773251028517,"version":"3.50.1"},"reference-count":15,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"1","license":[{"start":{"date-parts":[[2026,1,1]],"date-time":"2026-01-01T00:00:00Z","timestamp":1767225600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2026,1,1]],"date-time":"2026-01-01T00:00:00Z","timestamp":1767225600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2026,1,1]],"date-time":"2026-01-01T00:00:00Z","timestamp":1767225600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62576213"],"award-info":[{"award-number":["62576213"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Commun. Mag."],"published-print":{"date-parts":[[2026,1]]},"DOI":"10.1109\/mcom.001.2400602","type":"journal-article","created":{"date-parts":[[2025,12,11]],"date-time":"2025-12-11T18:45:31Z","timestamp":1765478731000},"page":"94-101","source":"Crossref","is-referenced-by-count":5,"title":["Fine-Tuning and Deploying Large Language Models Over Edges: Issues and Approaches"],"prefix":"10.1109","volume":"64","author":[{"given":"Yanjie","family":"Dong","sequence":"first","affiliation":[{"name":"SMBU,Shenzhen,China"}]},{"given":"Haijun","family":"Zhang","sequence":"additional","affiliation":[{"name":"USTB,Beijing,China"}]},{"given":"Chengming","family":"Li","sequence":"additional","affiliation":[{"name":"SMBU,Shenzhen,China"}]},{"given":"Song","family":"Guo","sequence":"additional","affiliation":[{"name":"HKUST, HK SAR,China"}]},{"given":"Victor C. M.","family":"Leung","sequence":"additional","affiliation":[{"name":"SMBU,Shenzhen,China"}]},{"given":"Xiping","family":"Hu","sequence":"additional","affiliation":[{"name":"SMBU,Shenzhen,China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/JSAC.2021.3118346"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1109\/MWC.008.2300516"},{"key":"ref3","first-page":"53,038","article-title":"Fine-Tuning Language Models with Just Forward Passes","volume-title":"NeurIPS","volume":"36","author":"Malladi"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP48485.2024.10447454"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.emnlp-main.488"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1109\/ISIT63088.2025.11195372"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1038\/s42256-023-00626-4"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2023.findings-acl.632"},{"key":"ref9","article-title":"Just One Byte (Per Gradient): A Note on Low-Bandwidth Decentralized Language Model Finetuning Using Shared Randomness","author":"Zelikman","year":"2023","journal-title":"arXiv preprint"},{"key":"ref10","first-page":"579","article-title":"FwdLLM: Efficient Federated Finetuning of Large Language Models with Perturbed Inferences","volume-title":"USENIX ATC","author":"Xu"},{"key":"ref11","article-title":"Federated Full Parameter Tuning of Billion-Sized Language Models with Communication Cost Under 18 Kilobytes","author":"Qin","year":"2024","journal-title":"ICML, Vienna, Austria"},{"key":"ref12","article-title":"On-Policy Distillation of Language Models: Learning from Self-Generated Mistakes","author":"Agarwal","year":"2024","journal-title":"ICLR, Vienna, Austria"},{"key":"ref13","first-page":"21,702","article-title":"LLM-pruner: On the Structural Pruning of Large Language Models","volume-title":"NeurIPS","volume":"36","author":"Ma"},{"key":"ref14","article-title":"OPTQ: Accurate Quantization for Generative Pre-Trained Transformers","author":"Frantar","year":"2023","journal-title":"ICLR, Kigali, Rwanda"},{"key":"ref15","first-page":"10,323","article-title":"SparseGPT: Massive Language Models Can Be Accurately Pruned in One-Shot","author":"Frantar","year":"2023","journal-title":"ICML, Honolulu, HI, USA"}],"container-title":["IEEE Communications Magazine"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/35\/11352382\/11297425.pdf?arnumber=11297425","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,1,27]],"date-time":"2026-01-27T05:48:27Z","timestamp":1769492907000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11297425\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2026,1]]},"references-count":15,"journal-issue":{"issue":"1"},"URL":"https:\/\/doi.org\/10.1109\/mcom.001.2400602","relation":{},"ISSN":["0163-6804","1558-1896"],"issn-type":[{"value":"0163-6804","type":"print"},{"value":"1558-1896","type":"electronic"}],"subject":[],"published":{"date-parts":[[2026,1]]}}}