{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,10]],"date-time":"2026-04-10T20:16:40Z","timestamp":1775852200345,"version":"3.50.1"},"reference-count":39,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2026,1,1]],"date-time":"2026-01-01T00:00:00Z","timestamp":1767225600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2026,1,1]],"date-time":"2026-01-01T00:00:00Z","timestamp":1767225600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2026,1,1]],"date-time":"2026-01-01T00:00:00Z","timestamp":1767225600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"name":"National Science Fund for Distinguished Young Scholars","award":["62325106"],"award-info":[{"award-number":["62325106"]}]},{"name":"Key Program of the National Natural Science Foundation of China","award":["62031019"],"award-info":[{"award-number":["62031019"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Commun."],"published-print":{"date-parts":[[2026]]},"DOI":"10.1109\/tcomm.2025.3626010","type":"journal-article","created":{"date-parts":[[2025,10,27]],"date-time":"2025-10-27T17:57:39Z","timestamp":1761587859000},"page":"307-321","source":"Crossref","is-referenced-by-count":7,"title":["Large Language Model Enabled Multi-Task Physical Layer Network"],"prefix":"10.1109","volume":"74","author":[{"given":"Tianyue","family":"Zheng","sequence":"first","affiliation":[{"name":"Department of Electronic Engineering and the State Key Laboratory of Space Network and Communications, Tsinghua University, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-4250-7315","authenticated-orcid":false,"given":"Linglong","family":"Dai","sequence":"additional","affiliation":[{"name":"Department of Electronic Engineering and the State Key Laboratory of Space Network and Communications, Tsinghua University, Beijing, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/MCOM.001.2001187"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1109\/TCCN.2017.2758370"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/LWC.2018.2818160"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/LWC.2018.2832128"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/JSAC.2022.3191334"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1109\/TSP.2020.2976585"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/MVT.2022.3164758"},{"issue":"8","key":"ref8","first-page":"9","article-title":"Language models are unsupervised multitask learners","volume":"1","author":"Alec","year":"2019","journal-title":"OpenAI blog"},{"key":"ref9","article-title":"Llama 2: Open foundation and fine-tuned chat models","author":"Touvron","year":"2023","journal-title":"arXiv:2307.09288"},{"key":"ref10","first-page":"1","article-title":"Language models are few-shot learners","volume-title":"Proc. Adv. Neural Inf. Process. Syst. (NeurIPS)","author":"Brown"},{"key":"ref11","article-title":"LLM-empowered resource allocation in wireless communications systems","author":"Lee","year":"2024","journal-title":"arXiv:2408.02944"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1109\/JSAC.2025.3643823"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.23919\/JCIN.2024.10582829"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1109\/lwc.2025.3543567"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.23919\/JCIN.2024.10582827"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/COMST.2024.3465447"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1016\/j.eng.2025.07.032"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/MWC.016.2300600"},{"key":"ref19","article-title":"Large generative AI models for telecom: The next big thing?","author":"Bariah","year":"2023","journal-title":"arXiv:2306.10249"},{"key":"ref20","article-title":"LoRA: Low-rank adaptation of large language models","author":"Hu","year":"2021","journal-title":"arXiv:2106.09685"},{"key":"ref21","article-title":"LoftQ: LoRA-fine-tuning-aware quantization for large language models","author":"Li","year":"2023","journal-title":"arXiv:2310.08659"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1109\/MSP.2014.2312183"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1109\/JCN.2013.000065"},{"key":"ref24","first-page":"1","article-title":"An image is worth 16\u00d716 words: Transformers for image recognition at scale","volume-title":"Proc. Int. Conf. Learn. Represent.","author":"Dosovitskiy"},{"key":"ref25","article-title":"A time series is worth 64 words: Long-term forecasting with transformers","author":"Nie","year":"2022","journal-title":"arXiv:2211.14730"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00745"},{"key":"ref27","article-title":"Qwen2.5 technical report","volume-title":"arXiv:2412.15115","author":"Yang","year":"2024"},{"key":"ref28","article-title":"Parameter-efficient tuning on layer normalization for pre-trained language models","author":"Qi","year":"2022","journal-title":"arXiv:2211.08682"},{"key":"ref29","article-title":"QLoRA: Efficient finetuning of quantized LLMs","author":"Dettmers","year":"2023","journal-title":"arXiv:2305.14314"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1109\/TCOMM.2019.2960361"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/TAP.2014.2310220"},{"key":"ref32","volume-title":"Study on Channel Model for Frequencies From 0.5 to 100 GHz","year":"2018"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2019.2937588"},{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.1109\/OJCOMS.2020.2982513"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1109\/TVT.2023.3262951"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1109\/TCOMM.2018.2823715"},{"key":"ref37","doi-asserted-by":"publisher","DOI":"10.1109\/T-WC.2008.070851"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1109\/LWC.2017.2757490"},{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.1109\/SPAWC.2017.8227772"}],"container-title":["IEEE Transactions on Communications"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/26\/11320979\/11218854.pdf?arnumber=11218854","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,1,7]],"date-time":"2026-01-07T18:31:37Z","timestamp":1767810697000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11218854\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2026]]},"references-count":39,"URL":"https:\/\/doi.org\/10.1109\/tcomm.2025.3626010","relation":{},"ISSN":["0090-6778","1558-0857"],"issn-type":[{"value":"0090-6778","type":"print"},{"value":"1558-0857","type":"electronic"}],"subject":[],"published":{"date-parts":[[2026]]}}}