{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,2]],"date-time":"2026-04-02T15:21:41Z","timestamp":1775143301313,"version":"3.50.1"},"reference-count":56,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"7","license":[{"start":{"date-parts":[[2025,7,1]],"date-time":"2025-07-01T00:00:00Z","timestamp":1751328000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2025,7,1]],"date-time":"2025-07-01T00:00:00Z","timestamp":1751328000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,7,1]],"date-time":"2025-07-01T00:00:00Z","timestamp":1751328000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"name":"ASTRA Project through the National Research Foundation"},{"DOI":"10.13039\/501100014188","name":"Ministry of Science and ICT, South Korea","doi-asserted-by":"publisher","award":["RS-2024-00439619"],"award-info":[{"award-number":["RS-2024-00439619"]}],"id":[{"id":"10.13039\/501100014188","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Pattern Anal. Mach. Intell."],"published-print":{"date-parts":[[2025,7]]},"DOI":"10.1109\/tpami.2025.3550281","type":"journal-article","created":{"date-parts":[[2025,3,14]],"date-time":"2025-03-14T17:48:01Z","timestamp":1741974481000},"page":"5385-5396","source":"Crossref","is-referenced-by-count":7,"title":["Deformable Graph Transformer"],"prefix":"10.1109","volume":"47","author":[{"ORCID":"https:\/\/orcid.org\/0000-0001-6913-7556","authenticated-orcid":false,"given":"Jinyoung","family":"Park","sequence":"first","affiliation":[{"name":"Korea University, Seoul, South Korea"}]},{"given":"Seongjun","family":"Yun","sequence":"additional","affiliation":[{"name":"Amazon, Seattle, WA, USA"}]},{"given":"Hyeonjin","family":"Park","sequence":"additional","affiliation":[{"name":"NAVER, NAVER Corporation, Bundang, South Korea"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-6798-9106","authenticated-orcid":false,"given":"Jaewoo","family":"Kang","sequence":"additional","affiliation":[{"name":"Korea University, Seoul, South Korea"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-3614-4199","authenticated-orcid":false,"given":"Jisu","family":"Jeong","sequence":"additional","affiliation":[{"name":"NAVER, NAVER Corporation, Bundang, South Korea"}]},{"given":"Kyung-Min","family":"Kim","sequence":"additional","affiliation":[{"name":"NAVER, NAVER Corporation, Bundang, South Korea"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-7400-7681","authenticated-orcid":false,"given":"Jung-Woo","family":"Ha","sequence":"additional","affiliation":[{"name":"NAVER AI Lab, and NAVER CLOUD, NAVER Corporation, Bundang, South Korea"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-2181-9264","authenticated-orcid":false,"given":"Hyunwoo J.","family":"Kim","sequence":"additional","affiliation":[{"name":"School of Computing, Korea Advanced Institute of Science and Technology, Daejeon, South Korea"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.48550\/ARXIV.1706.03762"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.48550\/arXiv.1810.04805"},{"key":"ref3","first-page":"5753","article-title":"XLNet: Generalized autoregressive pretraining for language understanding","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Yang"},{"key":"ref4","first-page":"1877","article-title":"Language models are few-shot learners","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Brown"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP40776.2020.9053896"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.21437\/interspeech.2020-3015"},{"key":"ref7","article-title":"An image is worth 16 \u00d7 16 words: Transformers for image recognition at scale","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Dosovitskiy"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV48922.2021.00986"},{"key":"ref9","first-page":"30008","article-title":"Focal Attention for long-range interactions in vision transformers","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Yang","year":"2021"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-58452-8_13"},{"key":"ref11","article-title":"Deformable DETR: Deformable transformers for end-to-end object detection","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Zhu"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV48922.2021.01595"},{"key":"ref13","first-page":"28877","article-title":"Do transformers really perform badly for graph representation?","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Ying"},{"key":"ref14","article-title":"A generalization of transformer networks to graphs","volume-title":"Proc. AAAI Conf. Artif. Intell. Workshops","author":"Dwivedi"},{"key":"ref15","article-title":"Graphit: Encoding graph structure in transformers","author":"Mialon","year":"2021"},{"key":"ref16","first-page":"21618","article-title":"Rethinking graph transformers with spectral attention","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Kreuzer"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1007\/BF02289026"},{"key":"ref18","article-title":"Semi-supervised classification with graph convolutional networks","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Kipf"},{"key":"ref19","first-page":"1024","article-title":"Inductive representation learning on large graphs","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Hamilton"},{"key":"ref20","first-page":"6861","article-title":"Simplifying graph convolutional networks","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Wu"},{"key":"ref21","first-page":"5453","article-title":"Representation learning on graphs with jumping knowledge networks","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Xu"},{"key":"ref22","first-page":"1263","article-title":"Neural message passing for quantum chemistry","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Gilmer"},{"key":"ref23","first-page":"12559","article-title":"Self-supervised graph transformer on large-scale molecular data","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Rong"},{"key":"ref24","article-title":"Graph attention networks","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Veli\u010dkovi\u0107"},{"key":"ref25","article-title":"How attentive are graph attention networks?","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Brody"},{"key":"ref26","article-title":"How to find your friendly neighborhood: Graph attention design with self-supervision","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Kim"},{"key":"ref27","first-page":"21","article-title":"Mixhop: Higher-order graph convolutional architectures via sparsified neighborhood mixing","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Abu-El-Haija"},{"key":"ref28","article-title":"GEOM-GCN: Geometric graph convolutional networks","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Pei"},{"key":"ref29","first-page":"7793","article-title":"Beyond homophily in graph neural networks: Current limitations and effective designs","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Zhu"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v36i7.20765"},{"key":"ref31","first-page":"13266","article-title":"Representing long-range context for graph neural networks with global attention","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Wu"},{"key":"ref32","article-title":"Rethinking attention with performers","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Choromanski"},{"key":"ref33","first-page":"4651","article-title":"Perceiver: General perception with iterative attention","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Jaegle"},{"key":"ref34","article-title":"Reformer: The efficient transformer","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Kitaev"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i16.17664"},{"key":"ref36","article-title":"Perceiver IO: A general architecture for structured inputs & outputs","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Jaegle"},{"key":"ref37","article-title":"Generating long sequences with sparse transformers","author":"Child","year":"2019"},{"key":"ref38","first-page":"17283","article-title":"Big bird: Transformers for longer sequences","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Zaheer"},{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.emnlp-main.19"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR52688.2022.00475"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.1145\/2939672.2939754"},{"key":"ref42","article-title":"Predict then propagate: Graph neural networks meet personalized pagerank","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Klicpera"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1198\/016214502388618906"},{"key":"ref44","article-title":"Revisiting link prediction: A data perspective","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Mao"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.1609\/aimag.v29i3.2157"},{"key":"ref46","first-page":"22118","article-title":"Open graph benchmark: Datasets for machine learning on graphs","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Hu"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.1093\/comnet\/cnab014"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1145\/1557019.1557108"},{"key":"ref49","first-page":"20887","article-title":"Large scale learning on non-homophilous graphs: New benchmarks and strong simple methods","volume-title":"Proc. Int. Conf. Neural Inf. Process. Syst.","author":"Lim"},{"key":"ref50","article-title":"Twitch gamers: A dataset for evaluating proximity preserving and structural role-based node embeddings","author":"Rozemberczki","year":"2021"},{"key":"ref51","article-title":"Fast graph representation learning with PyTorch geometric","volume-title":"Proc. Int. Conf. Learn. Representations W","author":"Fey"},{"key":"ref52","article-title":"Deep graph library: A graph-centric, highly-performant package for graph neural networks","author":"Wang","year":"2019"},{"key":"ref53","article-title":"Adam: A method for stochastic optimization","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Kingma"},{"issue":"1","key":"ref54","first-page":"1929","article-title":"Dropout: A simple way to prevent neural networks from overfitting","volume":"15","author":"Srivastava","year":"2014","journal-title":"J. Mach. Learn. Res."},{"key":"ref55","article-title":"Graph neural networks with learnable structural and positional representations","volume-title":"Proc. Int. Conf. Learn. Representations","author":"Dwivedi"},{"key":"ref56","article-title":"Benchmarking graph neural networks","author":"Dwivedi","year":"2020"}],"container-title":["IEEE Transactions on Pattern Analysis and Machine Intelligence"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/34\/11026037\/10925360.pdf?arnumber=10925360","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,6]],"date-time":"2025-06-06T17:42:19Z","timestamp":1749231739000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10925360\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,7]]},"references-count":56,"journal-issue":{"issue":"7"},"URL":"https:\/\/doi.org\/10.1109\/tpami.2025.3550281","relation":{},"ISSN":["0162-8828","2160-9292","1939-3539"],"issn-type":[{"value":"0162-8828","type":"print"},{"value":"2160-9292","type":"electronic"},{"value":"1939-3539","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025,7]]}}}