{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,30]],"date-time":"2026-03-30T21:02:38Z","timestamp":1774904558782,"version":"3.50.1"},"reference-count":52,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"1","license":[{"start":{"date-parts":[[2026,2,1]],"date-time":"2026-02-01T00:00:00Z","timestamp":1769904000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2026,2,1]],"date-time":"2026-02-01T00:00:00Z","timestamp":1769904000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2026,2,1]],"date-time":"2026-02-01T00:00:00Z","timestamp":1769904000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100020950","name":"National Science and Technology Council of Taiwan","doi-asserted-by":"publisher","award":["NSTC 114-2222-E-155-005"],"award-info":[{"award-number":["NSTC 114-2222-E-155-005"]}],"id":[{"id":"10.13039\/501100020950","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/100020595","name":"National Science and Technology Council","doi-asserted-by":"publisher","award":["NSTC 114-2218-E-110-005"],"award-info":[{"award-number":["NSTC 114-2218-E-110-005"]}],"id":[{"id":"10.13039\/100020595","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/100020595","name":"National Science and Technology Council","doi-asserted-by":"publisher","award":["NSTC 112-2221-E-110-038-MY3"],"award-info":[{"award-number":["NSTC 112-2221-E-110-038-MY3"]}],"id":[{"id":"10.13039\/100020595","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Sixth Generation Communication and Sensing Research Center"},{"name":"Higher Education SPROUT Project, the Ministry of Education of Taiwan"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Consumer Electron."],"published-print":{"date-parts":[[2026,2]]},"DOI":"10.1109\/tce.2025.3634569","type":"journal-article","created":{"date-parts":[[2025,11,19]],"date-time":"2025-11-19T18:45:40Z","timestamp":1763577940000},"page":"2135-2145","source":"Crossref","is-referenced-by-count":0,"title":["GAI Transformer Fusion for Multimodal Consumer Electronics: Scenario-Based Benchmarking and Resilience to Missing Data"],"prefix":"10.1109","volume":"72","author":[{"given":"M. Junaid","family":"Gul","sequence":"first","affiliation":[{"name":"Department of Information and Communication Engineering, Yeungnam University, Gyeongsan-si, Gyeongbuk-do, Republic of Korea"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-7257-3073","authenticated-orcid":false,"given":"Anal","family":"Paul","sequence":"additional","affiliation":[{"name":"Department of Computer Science and Engineering, Yuan Ze University, Taoyuan, Taiwan"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9028-4518","authenticated-orcid":false,"given":"Keshav","family":"Singh","sequence":"additional","affiliation":[{"name":"Institute of Communications Engineering, National Sun Yat-sen University, Kaohsiung, Taiwan"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1145\/3065386"},{"key":"ref2","first-page":"3104","article-title":"Sequence to sequence learning with neural networks","volume-title":"Proc. Annu. Conf. Neural Inf. Process. 
Syst.","author":"Sutskever"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.48550\/ARXIV.1706.03762"},{"key":"ref4","first-page":"1597","article-title":"A simple framework for contrastive learning of visual representations","volume-title":"Proc. 37th Int. Conf. Mach. Learn.","volume":"119","author":"Chen"},{"key":"ref5","first-page":"1877","article-title":"Language models are few-shot learners","volume-title":"Proc. NIPS","author":"Brown"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1038\/s41586-021-03819-2"},{"key":"ref7","article-title":"On the opportunities and risks of foundation models","author":"Bommasani","year":"2021","journal-title":"arXiv:2108.07258"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.48550\/arXiv.1810.04805"},{"key":"ref9","article-title":"Benchmarking neural network robustness to common corruptions and perturbations","volume-title":"Proc. Int. Conf. Learn. Represent. (ICLR)","author":"Hendrycks"},{"key":"ref10","article-title":"Towards deep learning models resistant to adversarial attacks","volume-title":"Proc. Int. Conf. Learn. Represent. (ICLR)","author":"M\u0105dry"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.5555\/3045390.3045502"},{"key":"ref12","article-title":"Very deep convolutional networks for large-scale image recognition","volume-title":"Proc. Int. Conf. Learn. Represent. (ICLR)","author":"Simonyan"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.90"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2014.81"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.91"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.l007\/978-3-319-46448-0_2"},{"key":"ref17","article-title":"Neural machine translation by jointly learning to align and translate","volume-title":"Proc. Int. Conf. Learn. Represent. (ICLR)","author":"Bahdanau"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1016\/j.aiopen.2022.10.001"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2023.122666"},{"key":"ref20","first-page":"8748","article-title":"Learning transferable visual models from natural language supervision","volume-title":"Proc. Int. Conf. Mach. Learn.","volume":"139","author":"Radford"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1038\/nature24270"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.5555\/2969033.2969125"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00453"},{"key":"ref24","first-page":"6840","article-title":"Denoising diffusion probabilistic models","volume-title":"Proc. NIPS","volume":"33","author":"Ho"},{"key":"ref25","first-page":"8821","article-title":"Zero-shot text-to-image generation","volume-title":"Proc. Int. Conf. Mach. Learn. 
(ICML)","author":"Ramesh"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR42600.2020.00975"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.5555\/3495724.3497510"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1109\/MCE.2024.3387049"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.3390\/electronics13244965"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1097\/PAP.0000000000000498"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1038\/s41586-024-07618-3"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.1038\/nbt.4235"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.1038\/s41587-019-0224-x"},{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.1103\/PhysRevLett.120.145301"},{"key":"ref35","article-title":"Multimodal generative AI with autoregressive LLMs for human motion understanding and generation: A way forward","author":"Islam","year":"2025","journal-title":"arXiv:2506.03191"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1109\/TCE.2023.3323561"},{"key":"ref37","doi-asserted-by":"publisher","DOI":"10.1109\/TCE.2023.3323373"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1109\/TCE.2024.3365107"},{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.1109\/TCE.2023.3296759"},{"key":"ref40","article-title":"Explaining and harnessing adversarial examples","volume-title":"Proc. Int. Conf. Learn. Represent. (ICLR)","author":"Goodfellow"},{"key":"ref41","article-title":"Towards a rigorous science of interpretable machine learning","author":"Doshi-Velez","year":"2017","journal-title":"arXiv:1702.08608"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.1145\/3287560.3287596"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1145\/3458723"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1561\/2200000083"},{"key":"ref45","article-title":"Federated learning: Strategies for improving communication efficiency","author":"Kone\u010dn\u1ef3","year":"2016","journal-title":"arXiv:1610.05492"},{"key":"ref46","article-title":"Nitriding: A tool kit for building scalable, networked, secure enclaves","author":"Winter","year":"2022","journal-title":"arXiv:2206.04123"},{"issue":"164","key":"ref47","first-page":"1","article-title":"Improving reproducibility in machine learning research (a report from the NeurIPS 2019 reproducibility program)","volume-title":"J. Mach. Learn. Res.","volume":"22","author":"Pineau","year":"2020"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2023.3330081"},{"key":"ref49","first-page":"997","article-title":"MLIF-Net: Multimodal language-image fusion network to distinguish between AI-generated and real images","volume-title":"Proc. 8th Int. Conf. Adv. Algorithms Control Eng. 
(ICAACE)","author":"Islam"},{"key":"ref50","doi-asserted-by":"publisher","DOI":"10.1016\/j.media.2017.07.005"},{"key":"ref51","article-title":"Dataset of clinical cases, images, image labels and captions from open access pubmed central articles (multicare dataset)","volume":"48","author":"Offidani","year":"2023","journal-title":"Data Brief"},{"key":"ref52","doi-asserted-by":"publisher","DOI":"10.1145\/2766462.2767755"}],"container-title":["IEEE Transactions on Consumer Electronics"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/30\/11456295\/11259491.pdf?arnumber=11259491","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,3,30]],"date-time":"2026-03-30T20:06:39Z","timestamp":1774901199000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11259491\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2026,2]]},"references-count":52,"journal-issue":{"issue":"1"},"URL":"https:\/\/doi.org\/10.1109\/tce.2025.3634569","relation":{},"ISSN":["0098-3063","1558-4127"],"issn-type":[{"value":"0098-3063","type":"print"},{"value":"1558-4127","type":"electronic"}],"subject":[],"published":{"date-parts":[[2026,2]]}}}