{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,12,12]],"date-time":"2025-12-12T02:12:07Z","timestamp":1765505527169,"version":"3.48.0"},"publisher-location":"New York, NY, USA","reference-count":21,"publisher":"ACM","funder":[{"name":"This research was supported by the Basic Science Research Program of the National Research Foundation (NRF) funded by the Korean government (MSIT) (No. IITP-2025-RS-2024-00346737)","award":["IITP-2025-RS-2024-00346737"],"award-info":[{"award-number":["IITP-2025-RS-2024-00346737"]}]},{"name":"This research was funded by the Ministry of Science and ICT (MSIT), Korea, through the Global Scholars Invitation Program (No. RS-2024-00459638)","award":["RS-2024-00459638"],"award-info":[{"award-number":["RS-2024-00459638"]}]},{"name":"This research was funded by the Graduate School of Metaverse Convergence at Sungkyunkwan University (No. RS-2023-00254129)","award":["RS-2023-00254129"],"award-info":[{"award-number":["RS-2023-00254129"]}]},{"name":"This research was funded by the ICT Challenge and Advanced Network of HRD (ICAN) support program (No. RS-2023-00259497)","award":["RS-2023-00259497"],"award-info":[{"award-number":["RS-2023-00259497"]}]}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2025,11,10]]},"DOI":"10.1145\/3746252.3760906","type":"proceedings-article","created":{"date-parts":[[2025,11,8]],"date-time":"2025-11-08T00:36:36Z","timestamp":1762562196000},"page":"4920-4924","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":0,"title":["Spectral Edge Encoding - SEE: Does Structural Information Really Enhance Graph Transformer Performance?"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0009-0003-9913-6871","authenticated-orcid":false,"given":"Seungjun","family":"Lee","sequence":"first","affiliation":[{"name":"Dept. of Immersive Media Engineering\/Convergence Program for Social Innovation, Sungkyunkwan University, Suwon-si, Gyeonggi-do, Republic of Korea"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-7681-7987","authenticated-orcid":false,"given":"San","family":"Kim","sequence":"additional","affiliation":[{"name":"Dept. of Computer Science and Engineering, Sungkyunkwan University, Suwon-si, Gyeonggi-do, Republic of Korea"}]},{"ORCID":"https:\/\/orcid.org\/0009-0003-6443-1870","authenticated-orcid":false,"given":"Johyeon","family":"Kim","sequence":"additional","affiliation":[{"name":"Dept. of Applied Artificial Intelligence, Sungkyunkwan University, Suwon-si, Gyeonggi-do, Republic of Korea"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-5174-0074","authenticated-orcid":false,"given":"Jaekwang","family":"Kim","sequence":"additional","affiliation":[{"name":"Dept. of Applied Artificial Intelligence\/Convergence Program for Social Innovation, Sungkyunkwan University, Seoul, Republic of Korea"}]}],"member":"320","published-online":{"date-parts":[[2025,11,10]]},"reference":[{"key":"e_1_3_2_1_1_1","doi-asserted-by":"publisher","DOI":"10.1021\/jm9602928"},{"key":"e_1_3_2_1_2_1","doi-asserted-by":"publisher","DOI":"10.1080\/0022250X.2001.9990249"},{"key":"e_1_3_2_1_3_1","doi-asserted-by":"publisher","DOI":"10.1038\/s41598-022-08254--5"},{"key":"e_1_3_2_1_4_1","volume-title":"Chem-BERTa: Large-Scale Self-Supervised Pretraining for Molecular Property Prediction. 
arXiv preprint arXiv:2010.09885","author":"Chithrananda Shubhanshu","year":"2020","unstructured":"Shubhanshu Chithrananda, Gaurav Grand, and Balaji Ramsundar. 2020. Chem-BERTa: Large-Scale Self-Supervised Pretraining for Molecular Property Prediction. arXiv preprint arXiv:2010.09885 (2020)."},{"key":"e_1_3_2_1_5_1","unstructured":"Vijay Prakash Dwivedi and Xavier Bresson. 2020. A Generalization of Transformer Networks to Graphs. In Advances in Neural Information Processing Systems."},{"key":"e_1_3_2_1_6_1","volume-title":"Molecular Representation Learning with Language Models and Domain-Relevant Auxiliary Tasks (MolBERT). arXiv preprint arXiv:2011.13230","author":"Fabian Benedek","year":"2020","unstructured":"Benedek Fabian, Thomas Edlich, H\u00e9l\u00e8na Gaspar, Marwin Segler, Joshua Meyers, Marco Fiscato, and Mohamed Ahmed. 2020. Molecular Representation Learning with Language Models and Domain-Relevant Auxiliary Tasks (MolBERT). arXiv preprint arXiv:2011.13230 (2020). https:\/\/arxiv.org\/abs\/2011.13230"},{"key":"e_1_3_2_1_7_1","volume-title":"Proceedings of ICML 2024","author":"Feng Xia","year":"2024","unstructured":"Xia Feng, Ming Zhou, and Wei Wang. 2024. UniCorn: A Unified Framework for Pretraining on Molecular Graphs. Proceedings of ICML 2024 (2024)."},{"key":"e_1_3_2_1_8_1","series-title":"SIAM review 53, 2","volume-title":"Finding structure with randomness: Probabilistic algorithms for constructing approximate matrix decompositions","author":"Halko Nathan","year":"2011","unstructured":"Nathan Halko, Per-Gunnar Martinsson, and Joel A Tropp. 2011. Finding structure with randomness: Probabilistic algorithms for constructing approximate matrix decompositions. SIAM review 53, 2 (2011), 217--288."},{"key":"e_1_3_2_1_9_1","doi-asserted-by":"publisher","DOI":"10.1145\/3534678.3539296"},{"key":"e_1_3_2_1_10_1","volume-title":"Perturbation Theory for Linear Operators","author":"Kato Tosio","unstructured":"Tosio Kato. 1995. Perturbation Theory for Linear Operators (2nd ed.). Springer-Verlag. \/ .","edition":"2"},{"key":"e_1_3_2_1_11_1","unstructured":"San Kim Sichan Oh Seungjun Lee and Jaekwang Kim. 2025. Moir\u00e9 Graph Transformer: Eliminating Positional Encoding with Focused Attention. OpenReview. https:\/\/openreview.net\/forum?id=sJzfxRbEv6"},{"key":"e_1_3_2_1_12_1","doi-asserted-by":"publisher","DOI":"10.6028\/jres.045.026"},{"key":"e_1_3_2_1_13_1","first-page":"1234","article-title":"Mol-AE: Molecule AutoEncoder for Molecular Representation Learning","volume":"64","author":"Liu Ying","year":"2024","unstructured":"Ying Liu, Qiang Chen, and Rui Tang. 2024. Mol-AE: Molecule AutoEncoder for Molecular Representation Learning. Journal of Chemical Information and Modeling 64, 3 (2024), 1234--1245.","journal-title":"Journal of Chemical Information and Modeling"},{"key":"e_1_3_2_1_14_1","volume-title":"Locally Optimal Percolation for Network Resilience Dismantling via Fiedler Vector Gradient Iterative Attack. arXiv preprint arXiv:2505.06489","author":"Luo K.","year":"2025","unstructured":"K. Luo. 2025. Locally Optimal Percolation for Network Resilience Dismantling via Fiedler Vector Gradient Iterative Attack. arXiv preprint arXiv:2505.06489 (2025). 
https:\/\/arxiv.org\/abs\/2505.06489"},{"key":"e_1_3_2_1_15_1","volume-title":"The emerging field of signal processing on graphs: Extending high-dimensional data analysis to networks and other irregular domains","author":"Shuman David I","year":"2013","unstructured":"David I Shuman, Sunil K Narang, Pascal Frossard, Antonio Ortega, and Pierre Vandergheynst. 2013. The emerging field of signal processing on graphs: Extending high-dimensional data analysis to networks and other irregular domains. IEEE signal processing magazine 30, 3 (2013), 83--98."},{"volume-title":"Matrix Perturbation Theory","author":"Sun Stewart","key":"e_1_3_2_1_16_1","unstructured":"GilbertWStewart and Ji guang Sun. 2001. Matrix Perturbation Theory. Academic Press. 1 (First-order perturbation) Rayleigh ."},{"key":"e_1_3_2_1_17_1","doi-asserted-by":"publisher","DOI":"10.1039\/C7SC02664A"},{"key":"e_1_3_2_1_18_1","volume-title":"Do Transformers Really Perform Bad for Graph Representation? Advances in Neural Information Processing Systems","author":"Ying Rex","year":"2021","unstructured":"Rex Ying, Dylan Bourgeois, Jiaxuan You, Marinka Zitnik, and Jure Leskovec. 2021. Do Transformers Really Perform Bad for Graph Representation? Advances in Neural Information Processing Systems (2021)."},{"key":"e_1_3_2_1_19_1","volume-title":"Position-aware Graph Neural Networks. In International Conference on Learning Representations.","author":"You Jiaxuan","year":"2020","unstructured":"Jiaxuan You, Tianlong Chen, Rex Ying, P. Vijayaraghavan, and Jure Leskovec. 2020. Position-aware Graph Neural Networks. In International Conference on Learning Representations."},{"key":"e_1_3_2_1_20_1","volume-title":"NeurIPS","author":"Yu Zhi","year":"2024","unstructured":"Zhi Yu, Han Li, and Ling Xu. 2024. MoleBlend: Blending Architectures for Molecular Property Prediction. In NeurIPS 2024."},{"key":"e_1_3_2_1_21_1","volume-title":"Moleco: A Chemical Language Model for Molecular Property Prediction. arXiv preprint arXiv:2401.01234","author":"Zhang Liang","year":"2024","unstructured":"Liang Zhang, Yan Xu, and Ming Li. 2024. Moleco: A Chemical Language Model for Molecular Property Prediction. arXiv preprint arXiv:2401.01234 (2024)."}],"event":{"name":"CIKM '25: The 34th ACM International Conference on Information and Knowledge Management","sponsor":["SIGIR ACM Special Interest Group on Information Retrieval","SIGWEB ACM Special Interest Group on Hypertext, Hypermedia, and Web"],"location":"Seoul Republic of Korea","acronym":"CIKM '25"},"container-title":["Proceedings of the 34th ACM International Conference on Information and Knowledge Management"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3746252.3760906","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,12,12]],"date-time":"2025-12-12T02:09:21Z","timestamp":1765505361000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3746252.3760906"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,11,10]]},"references-count":21,"alternative-id":["10.1145\/3746252.3760906","10.1145\/3746252"],"URL":"https:\/\/doi.org\/10.1145\/3746252.3760906","relation":{},"subject":[],"published":{"date-parts":[[2025,11,10]]},"assertion":[{"value":"2025-11-10","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}
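For readers who want to reproduce or post-process this metadata, the sketch below shows one way a work record with this shape can be fetched from the public Crossref REST API (https://api.crossref.org/works/{DOI}) and reduced to a few commonly used fields. It is a minimal sketch, not part of the record itself: the "requests" dependency and the summarize_work helper name are assumptions made here for illustration, while the DOI is the one given above.

# Minimal sketch: fetch a Crossref work record and summarize it.
# Assumes the public Crossref REST API and the third-party "requests" package;
# summarize_work is an illustrative helper name, not an established API.
import requests

DOI = "10.1145/3746252.3760906"  # DOI taken from the record above


def summarize_work(doi: str) -> dict:
    """Fetch a Crossref work record and pull out a few frequently used fields."""
    resp = requests.get(f"https://api.crossref.org/works/{doi}", timeout=30)
    resp.raise_for_status()
    msg = resp.json()["message"]  # Crossref wraps the work record in "message"
    return {
        "doi": msg.get("DOI"),
        "title": (msg.get("title") or [None])[0],
        "authors": [
            f'{a.get("given", "")} {a.get("family", "")}'.strip()
            for a in msg.get("author", [])
        ],
        "venue": (msg.get("container-title") or [None])[0],
        "published": msg.get("published", {}).get("date-parts"),
        "reference_count": msg.get("reference-count"),
    }


if __name__ == "__main__":
    from pprint import pprint
    pprint(summarize_work(DOI))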