{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,10,8]],"date-time":"2025-10-08T15:22:18Z","timestamp":1759936938154,"version":"3.41.0"},"reference-count":65,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"6","license":[{"start":{"date-parts":[[2025,6,1]],"date-time":"2025-06-01T00:00:00Z","timestamp":1748736000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/legalcode"}],"funder":[{"DOI":"10.13039\/501100010418","name":"Institute of Information and Communications Technology Planning and Evaluation (IITP) grant funded by the Korean Government [Ministry of Science and ICT (MSIT)]","doi-asserted-by":"publisher","award":["20210-02068","IITP-2024-00156287"],"award-info":[{"award-number":["20210-02068","IITP-2024-00156287"]}],"id":[{"id":"10.13039\/501100010418","id-type":"DOI","asserted-by":"publisher"}]},{"name":"KENTECH Research","award":["202200019A"],"award-info":[{"award-number":["202200019A"]}]},{"DOI":"10.13039\/501100003725","name":"National Research Foundation of Korea","doi-asserted-by":"publisher","award":["4199990214639"],"award-info":[{"award-number":["4199990214639"]}],"id":[{"id":"10.13039\/501100003725","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Neural Netw. Learning Syst."],"published-print":{"date-parts":[[2025,6]]},"DOI":"10.1109\/tnnls.2024.3472897","type":"journal-article","created":{"date-parts":[[2024,11,25]],"date-time":"2024-11-25T18:48:00Z","timestamp":1732560480000},"page":"10271-10282","source":"Crossref","is-referenced-by-count":2,"title":["Beyond Message-Passing: Generalization of Graph Neural Networks via Feature Perturbation for Semi-Supervised Node Classification"],"prefix":"10.1109","volume":"36","author":[{"ORCID":"https:\/\/orcid.org\/0000-0003-4359-5596","authenticated-orcid":false,"given":"Yoonhyuk","family":"Choi","sequence":"first","affiliation":[{"name":"School of Computing and Augmented Intelligence, Arizona State University, Tempe, AZ, USA"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-7140-7962","authenticated-orcid":false,"given":"Jiho","family":"Choi","sequence":"additional","affiliation":[{"name":"Korea Advanced Institute of Science and Technology (KAIST) AI, Seoul, South Korea"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-7248-4751","authenticated-orcid":false,"given":"Taewook","family":"Ko","sequence":"additional","affiliation":[{"name":"Computer Engineering, Seoul National University, Seoul, South Korea"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-9492-6546","authenticated-orcid":false,"given":"Chong-Kwon","family":"Kim","sequence":"additional","affiliation":[{"name":"Energy AI, Korea Institute of Energy Technology, Naju, South Korea"}]}],"member":"263","reference":[{"key":"ref1","first-page":"1","article-title":"Convolutional neural networks on graphs with fast localized spectral filtering","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"29","author":"Defferrard"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.48550\/ARXIV.1609.02907"},{"key":"ref3","first-page":"1","article-title":"Inductive representation learning on large graphs","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"30","author":"Hamilton"},{"key":"ref4","first-page":"20","article-title":"Graph attention networks","volume":"1050","author":"Velickovic","year":"2017","journal-title":"Stat"},{"key":"ref5","first-page":"1263","article-title":"Neural message passing for quantum chemistry","volume-title":"Proc. 34th Int. Conf. Mach. Learn.","volume":"70","author":"Gilmer"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1146\/annurev.soc.27.1.415"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2019\/565"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i5.16514"},{"key":"ref9","article-title":"How to find your friendly neighborhood: Graph attention design with self-supervision","volume-title":"arXiv:2204.04879","author":"Kim","year":"2022"},{"key":"ref10","first-page":"1","article-title":"Gnnexplainer: Generating explanations for graph neural networks","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"32","author":"Ying"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1145\/3437963.3441734"},{"key":"ref12","article-title":"Geom-GCN: Geometric graph convolutional networks","volume-title":"arXiv:2002.05287","author":"Pei","year":"2020"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1145\/3437963.3441735"},{"key":"ref14","first-page":"13242","article-title":"Finding global homophily in graph neural networks when meeting heterophily","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Li"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1145\/3447548.3467451"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2020.2978386"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2021.3090503"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/tnnls.2022.3155478"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1109\/Confluence52989.2022.9734174"},{"key":"ref20","article-title":"How universal polynomial bases enhance spectral graph neural networks: Heterophily, over-smoothing, and over-squashing","volume-title":"arXiv:2405.12474","author":"Huang","year":"2024"},{"key":"ref21","article-title":"Spectral networks and locally connected networks on graphs","volume-title":"arXiv:1312.6203","author":"Bruna","year":"2013"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1145\/3459637.3482226"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2021.3054830"},{"key":"ref24","first-page":"7793","article-title":"Beyond homophily in graph neural networks: Current limitations and effective designs","volume":"33","author":"Zhu","year":"2020","journal-title":"Proc. Adv. Neural Inf. Process. Syst."},{"key":"ref25","article-title":"Adaptive universal generalized PageRank graph neural network","volume-title":"arXiv:2006.07988","author":"Chien","year":"2020"},{"key":"ref26","article-title":"Revisiting heterophily for graph neural networks","volume-title":"arXiv:2210.07606","author":"Luan","year":"2022"},{"key":"ref27","first-page":"23341","article-title":"How powerful are spectral graph neural networks","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Wang"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1109\/tnnls.2022.3161030"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1109\/tnnls.2023.3282049"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1109\/tnnls.2022.3230417"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2024.3370918"},{"key":"ref32","article-title":"Stochastic training of graph convolutional networks with variance reduction","volume-title":"arXiv:1710.10568","author":"Chen","year":"2017"},{"key":"ref33","article-title":"Sparse-input neural networks for high-dimensional nonparametric regression and classification","volume-title":"arXiv:1711.07592","author":"Feng","year":"2017"},{"key":"ref34","first-page":"23768","article-title":"Be confident! Towards trustworthy graph neural networks via confidence calibration","volume-title":"Proc. Adv. Neural Inf. Process. Syst. (NIPS)","volume":"34","author":"Wang"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1109\/tnnls.2022.3179306"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2023.3321097"},{"key":"ref37","doi-asserted-by":"publisher","DOI":"10.1109\/TNSE.2023.3306545"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2024.3371592"},{"key":"ref39","article-title":"Normalization techniques in training DNNs: Methodology, analysis and application","volume-title":"arXiv:2009.12836","author":"Huang","year":"2020"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i5.16586"},{"key":"ref41","article-title":"Unifying graph convolutional neural networks and label propagation","volume-title":"arXiv:2002.06755","author":"Wang","year":"2020"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.1109\/TAI.2021.3096489"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1109\/TKDE.2021.3072345"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1109\/tmm.2022.3233442"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2023.3285215"},{"key":"ref46","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2022.3185621"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v36i4.20316"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2023.3295344"},{"key":"ref49","doi-asserted-by":"publisher","DOI":"10.1145\/3485447.3512169"},{"key":"ref50","article-title":"Predict then propagate: Graph neural networks meet personalized PageRank","volume-title":"arXiv:1810.05997","author":"Gasteiger","year":"2018"},{"key":"ref51","first-page":"1725","article-title":"Simple and deep graph convolutional networks","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Chen"},{"key":"ref52","first-page":"1858","article-title":"Shift invariance can reduce adversarial robustness","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"34","author":"Singla"},{"key":"ref53","first-page":"1","article-title":"Convergence analysis of two-layer neural networks with relu activation","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"30","author":"Li"},{"issue":"1","key":"ref54","first-page":"9885","article-title":"A group-theoretic framework for data augmentation","volume":"21","author":"Chen","year":"2020","journal-title":"J. Mach. Learn. Res."},{"key":"ref55","doi-asserted-by":"publisher","DOI":"10.1023\/A:1009778005914"},{"issue":"7","key":"ref56","first-page":"579","article-title":"Multilayer perceptron and neural networks","volume":"8","author":"Popescu","year":"2009","journal-title":"WSEAS Trans. Circuits Syst."},{"key":"ref57","article-title":"How attentive are graph attention networks?","volume-title":"arXiv:2105.14491","author":"Brody","year":"2021"},{"key":"ref58","article-title":"How powerful are graph neural networks?","volume-title":"arXiv:1810.00826","author":"Xu","year":"2018"},{"key":"ref59","doi-asserted-by":"publisher","DOI":"10.1145\/1557019.1557108"},{"key":"ref60","doi-asserted-by":"publisher","DOI":"10.1145\/3341161.3342890"},{"key":"ref61","first-page":"20887","article-title":"Large scale learning on non-homophilous graphs: New benchmarks and strong simple methods","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Lim"},{"key":"ref62","doi-asserted-by":"publisher","DOI":"10.1145\/3583780.3615117"},{"key":"ref63","article-title":"Two sides of the same coin: Heterophily and oversmoothing in graph convolutional neural networks","volume-title":"arXiv:2102.06462","author":"Yan","year":"2021"},{"key":"ref64","article-title":"Variational graph auto-encoders","volume-title":"arXiv:1611.07308","author":"Kipf","year":"2016"},{"key":"ref65","article-title":"DropEdge: Towards deep graph convolutional networks on node classification","volume-title":"arXiv:1907.10903","author":"Rong","year":"2019"}],"container-title":["IEEE Transactions on Neural Networks and Learning Systems"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/5962385\/11022714\/10767362.pdf?arnumber=10767362","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,4]],"date-time":"2025-06-04T17:57:12Z","timestamp":1749059832000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10767362\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,6]]},"references-count":65,"journal-issue":{"issue":"6"},"URL":"https:\/\/doi.org\/10.1109\/tnnls.2024.3472897","relation":{},"ISSN":["2162-237X","2162-2388"],"issn-type":[{"type":"print","value":"2162-237X"},{"type":"electronic","value":"2162-2388"}],"subject":[],"published":{"date-parts":[[2025,6]]}}}