{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,10]],"date-time":"2026-04-10T17:11:20Z","timestamp":1775841080955,"version":"3.50.1"},"publisher-location":"New York, NY, USA","reference-count":83,"publisher":"ACM","funder":[{"name":"National Science Foundation of China","award":["623B2010, 62225202, 62302023"],"award-info":[{"award-number":["623B2010, 62225202, 62302023"]}]},{"name":"Fundamental Research Funds for the Central Universities","award":["&#x5c;&#x2f;"],"award-info":[{"award-number":["&#x5c;&#x2f;"]}]},{"name":"Academic Excellence Foundation of BUAA for PhD Students","award":["&#x5c;&#x2f;"],"award-info":[{"award-number":["&#x5c;&#x2f;"]}]}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2026,4,13]]},"DOI":"10.1145\/3774904.3792139","type":"proceedings-article","created":{"date-parts":[[2026,4,9]],"date-time":"2026-04-09T21:54:34Z","timestamp":1775771674000},"page":"626-637","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":0,"title":["RAG-GFM: Overcoming In-Memory Bottlenecks in Graph Foundation Models via Retrieval-Augmented Generation"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0000-0001-9205-8610","authenticated-orcid":false,"given":"Haonan","family":"Yuan","sequence":"first","affiliation":[{"name":"SKLCCSE, School of Computer Science and Engineering, Beihang University, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-1930-3848","authenticated-orcid":false,"given":"Qingyun","family":"Sun","sequence":"additional","affiliation":[{"name":"SKLCCSE, School of Computer Science and Engineering, Beihang University, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0009-0009-6877-5455","authenticated-orcid":false,"given":"Jiacheng","family":"Tao","sequence":"additional","affiliation":[{"name":"SKLCCSE, School of Computer Science 
and Engineering, Beihang University, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-4643-8126","authenticated-orcid":false,"given":"Xingcheng","family":"Fu","sequence":"additional","affiliation":[{"name":"Key Lab of Education Blockchain and Intelligent Technology, Ministry of Education, Guangxi Normal University, Guilin, Guangxi, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-5152-0055","authenticated-orcid":false,"given":"Jianxin","family":"Li","sequence":"additional","affiliation":[{"name":"SKLCCSE, School of Computer Science and Engineering, Beihang University, Beijing, China"}]}],"member":"320","published-online":{"date-parts":[[2026,4,12]]},"reference":[{"key":"e_1_3_2_1_1_1","doi-asserted-by":"publisher","DOI":"10.1145\/3592626.3592663"},{"key":"e_1_3_2_1_2_1","first-page":"142","article-title":"When to pre-train graph neural networks? From data generation perspective!","author":"Cao Yuxuan","year":"2023","unstructured":"Yuxuan Cao, Jiarong Xu, Carl Yang, Jiaan Wang, Yunchao Zhang, Chunping Wang, Lei Chen, and Yang Yang. 2023. When to pre-train graph neural networks? From data generation perspective!. In KDD. 142-153.","journal-title":"KDD."},{"key":"e_1_3_2_1_3_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v34i04.5747"},{"key":"e_1_3_2_1_4_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.neucom.2022.05.070"},{"key":"e_1_3_2_1_5_1","volume-title":"Subgraph centrality in complex networks. Physical Review E\u2014Statistical, Nonlinear, and Soft Matter Physics","author":"Estrada Ernesto","year":"2005","unstructured":"Ernesto Estrada and Juan A Rodriguez-Velazquez. 2005. Subgraph centrality in complex networks. Physical Review E\u2014Statistical, Nonlinear, and Soft Matter Physics, Vol. 
71, 5 (2005), 056103."},{"key":"e_1_3_2_1_6_1","first-page":"6491","article-title":"A survey on rag meeting LLMs: Towards retrieval-augmented large language models","author":"Fan Wenqi","year":"2024","unstructured":"Wenqi Fan, Yujuan Ding, Liangbo Ning, Shijie Wang, Hengyun Li, Dawei Yin, Tat-Seng Chua, and Qing Li. 2024. A survey on rag meeting LLMs: Towards retrieval-augmented large language models. In KDD. 6491-6501.","journal-title":"KDD."},{"key":"e_1_3_2_1_7_1","first-page":"417","article-title":"Graph neural networks for social recommendation","author":"Fan Wenqi","year":"2019","unstructured":"Wenqi Fan, Yao Ma, Qing Li, Yuan He, Eric Zhao, Jiliang Tang, and Dawei Yin. 2019. Graph neural networks for social recommendation. In WWW. 417-426.","journal-title":"WWW."},{"key":"e_1_3_2_1_8_1","doi-asserted-by":"publisher","DOI":"10.1145\/276675.276685"},{"key":"e_1_3_2_1_9_1","doi-asserted-by":"publisher","DOI":"10.1038\/s41467-021-23303-9"},{"key":"e_1_3_2_1_10_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v39i11.33279"},{"key":"e_1_3_2_1_11_1","unstructured":"Zihao Guo Qingyun Sun Ziwei Zhang Haonan Yuan Huiping Zhuang Xingcheng Fu and Jianxin Li. 2025b. GraphKeeper: Graph domain-incremental learning via knowledge disentanglement and preservation. In NeurIPS."},{"key":"e_1_3_2_1_12_1","volume-title":"NeurIPS","volume":"30","author":"Hamilton Will","year":"2017","unstructured":"Will Hamilton, Zhitao Ying, and Jure Leskovec. 2017. Inductive representation learning on large graphs. NeurIPS, Vol. 30 (2017)."},{"key":"e_1_3_2_1_13_1","unstructured":"Haoyu Han Yu Wang Harry Shomer Kai Guo Jiayuan Ding Yongjia Lei Mahantesh Halappanavar Ryan A Rossi Subhabrata Mukherjee Xianfeng Tang et al. 2024. Retrieval-augmented generation with graphs (GraphRAG). 
arXiv preprint arXiv:2501.00309 (2024)."},{"key":"e_1_3_2_1_14_1","first-page":"448","article-title":"UniGraph: Learning a unified cross-domain foundation model for text-attributed graphs","author":"He Yufei","year":"2025","unstructured":"Yufei He, Yuan Sui, Xiaoxin He, and Bryan Hooi. 2025. UniGraph: Learning a unified cross-domain foundation model for text-attributed graphs. In KDD. 448-459.","journal-title":"KDD."},{"key":"e_1_3_2_1_15_1","first-page":"22118","article-title":"Open graph benchmark: Datasets for machine learning on graphs","volume":"33","author":"Hu Weihua","year":"2020","unstructured":"Weihua Hu, Matthias Fey, Marinka Zitnik, Yuxiao Dong, Hongyu Ren, Bowen Liu, Michele Catasta, and Jure Leskovec. 2020. Open graph benchmark: Datasets for machine learning on graphs. NeurIPS, Vol. 33 (2020), 22118-22133.","journal-title":"NeurIPS"},{"key":"e_1_3_2_1_16_1","first-page":"6397","article-title":"Few-shot relational reasoning via connection subgraph pretraining","volume":"35","author":"Huang Qian","year":"2022","unstructured":"Qian Huang, Hongyu Ren, and Jure Leskovec. 2022. Few-shot relational reasoning via connection subgraph pretraining. NeurIPS, Vol. 35 (2022), 6397-6409.","journal-title":"NeurIPS"},{"key":"e_1_3_2_1_17_1","first-page":"7969","article-title":"Active retrieval augmented generation","author":"Jiang Zhengbao","year":"2023","unstructured":"Zhengbao Jiang, Frank F Xu, Luyu Gao, Zhiqing Sun, Qian Liu, Jane Dwivedi-Yu, Yiming Yang, Jamie Callan, and Graham Neubig. 2023. Active retrieval augmented generation. In EMNLP. 7969-7992.","journal-title":"EMNLP."},{"key":"e_1_3_2_1_18_1","first-page":"2268","article-title":"Not too little, not too much: A theoretical analysis of graph (over) smoothing","volume":"35","author":"Keriven Nicolas","year":"2022","unstructured":"Nicolas Keriven. 2022. Not too little, not too much: A theoretical analysis of graph (over) smoothing. NeurIPS, Vol. 
35 (2022), 2268-2281.","journal-title":"NeurIPS"},{"key":"e_1_3_2_1_19_1","first-page":"16190","article-title":"FactKG: Fact verification via reasoning on knowledge graphs","author":"Kim Jiho","year":"2023","unstructured":"Jiho Kim, Sungjin Park, Yeonsu Kwon, Yohan Jo, James Thorne, and Edward Choi. 2023. FactKG: Fact verification via reasoning on knowledge graphs. In ACL. 16190-16206.","journal-title":"ACL."},{"key":"e_1_3_2_1_20_1","unstructured":"Thomas N Kipf and Max Welling. 2017. Semi-supervised classification with graph convolutional networks. In ICLR."},{"key":"e_1_3_2_1_21_1","volume-title":"Estimating mutual information. Physical Review E\u2014Statistical, Nonlinear, and Soft Matter Physics","author":"Kraskov Alexander","year":"2004","unstructured":"Alexander Kraskov, Harald St\u00f6gbauer, and Peter Grassberger. 2004. Estimating mutual information. Physical Review E\u2014Statistical, Nonlinear, and Soft Matter Physics, Vol. 69, 6 (2004), 066138."},{"key":"e_1_3_2_1_22_1","first-page":"9459","article-title":"Retrieval-augmented generation for knowledge-intensive NLP tasks","volume":"33","author":"Lewis Patrick","year":"2020","unstructured":"Patrick Lewis, Ethan Perez, Aleksandra Piktus, Fabio Petroni, Vladimir Karpukhin, Naman Goyal, Heinrich K\u00fcttler, Mike Lewis, Wen-tau Yih, Tim Rockt\u00e4schel, et al., 2020. Retrieval-augmented generation for knowledge-intensive NLP tasks. NeurIPS, Vol. 33 (2020), 9459-9474.","journal-title":"NeurIPS"},{"key":"e_1_3_2_1_23_1","first-page":"11560","article-title":"Structure-aware language model pretraining improves dense retrieval on structured data","author":"Li Xinze","year":"2023","unstructured":"Xinze Li, Zhenghao Liu, Chenyan Xiong, Shi Yu, Yu Gu, Zhiyuan Liu, and Ge Yu. 2023. Structure-aware language model pretraining improves dense retrieval on structured data. In ACL Findings. 
11560-11574.","journal-title":"ACL Findings."},{"key":"e_1_3_2_1_24_1","first-page":"1725","article-title":"ZeroG: Investigating cross-dataset zero-shot transferability in graphs","author":"Li Yuhan","year":"2024","unstructured":"Yuhan Li, Peisong Wang, Zhixun Li, Jeffrey Xu Yu, and Jia Li. 2024. ZeroG: Investigating cross-dataset zero-shot transferability in graphs. In KDD. 1725-1735.","journal-title":"KDD."},{"key":"e_1_3_2_1_25_1","unstructured":"Hao Liu Jiarui Feng Lecheng Kong Ningyue Liang Dacheng Tao Yixin Chen and Muhan Zhang. 2024a. One for all: Towards training one graph model for all classification tasks. In ICLR."},{"key":"e_1_3_2_1_26_1","volume-title":"One model for one graph: A new perspective for pretraining with cross-domain graphs. arXiv preprint arXiv:2412.00315","author":"Liu Jingzhe","year":"2024","unstructured":"Jingzhe Liu, Haitao Mao, Zhikai Chen, Wenqi Fan, Mingxuan Ju, Tong Zhao, Neil Shah, and Jiliang Tang. 2024b. One model for one graph: A new perspective for pretraining with cross-domain graphs. arXiv preprint arXiv:2412.00315 (2024)."},{"key":"e_1_3_2_1_27_1","first-page":"417","article-title":"GraphPrompt: Unifying pre-training and downstream tasks for graph neural networks","author":"Liu Zemin","year":"2023","unstructured":"Zemin Liu, Xingtong Yu, Yuan Fang, and Xinming Zhang. 2023. GraphPrompt: Unifying pre-training and downstream tasks for graph neural networks. In WWW. 417-428.","journal-title":"WWW."},{"key":"e_1_3_2_1_28_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i5.16552"},{"key":"e_1_3_2_1_29_1","doi-asserted-by":"crossref","unstructured":"Jiayi Luo Qingyun Sun Lingjuan Lyu Ziwei Zhang Haonan Yuan Xingcheng Fu and Jianxin Li. 2026 a. Towards effective stealthy and persistent backdoor attacks targeting graph foundation models. In AAAI.","DOI":"10.1609\/aaai.v40i29.39593"},{"key":"e_1_3_2_1_30_1","doi-asserted-by":"crossref","unstructured":"Jiayi Luo Qingyun Sun Yuecen Wei Haonan Yuan Xingcheng Fu and Jianxin Li. 
2026 b. Privacy auditing of multi-domain graph pre-trained model under membership inference attacks. In AAAI.","DOI":"10.1609\/aaai.v40i18.38576"},{"key":"e_1_3_2_1_31_1","unstructured":"Linhao Luo Zicheng Zhao Gholamreza Haffari Dinh Phung Chen Gong and Shirui Pan. 2025. GFM-RAG: graph foundation model for retrieval augmented generation. In NeurIPS."},{"key":"e_1_3_2_1_32_1","first-page":"5303","article-title":"Query rewriting in retrieval-augmented large language models","author":"Ma Xinbei","year":"2023","unstructured":"Xinbei Ma, Yeyun Gong, Pengcheng He, Hai Zhao, and Nan Duan. 2023. Query rewriting in retrieval-augmented large language models. In EMNLP. 5303-5315.","journal-title":"EMNLP."},{"key":"e_1_3_2_1_33_1","volume-title":"Deep learning on graphs","author":"Ma Yao","unstructured":"Yao Ma and Jiliang Tang. 2021. Deep learning on graphs. Cambridge University Press."},{"key":"e_1_3_2_1_34_1","volume-title":"Position: Graph foundation models are already here. In ICML.","author":"Mao Haitao","year":"2024","unstructured":"Haitao Mao, Zhikai Chen, Wenzhuo Tang, Jianan Zhao, Yao Ma, Tong Zhao, Neil Shah, Mikhail Galkin, and Jiliang Tang. 2024. Position: Graph foundation models are already here. In ICML."},{"key":"e_1_3_2_1_35_1","doi-asserted-by":"publisher","DOI":"10.1023\/A:1009953814988"},{"key":"e_1_3_2_1_36_1","volume-title":"Wiki-CS: A wikipedia-based benchmark for graph neural networks. arXiv preprint arXiv:2007.02901","author":"Mernyei P\u00e9ter","year":"2020","unstructured":"P\u00e9ter Mernyei and C\u0103t\u0103lina Cangea. 2020. Wiki-CS: A wikipedia-based benchmark for graph neural networks. arXiv preprint arXiv:2007.02901 (2020)."},{"key":"e_1_3_2_1_37_1","volume-title":"Representation learning with contrastive predictive coding. arXiv preprint arXiv:1807.03748","author":"van den Oord Aaron","year":"2018","unstructured":"Aaron van den Oord, Yazhe Li, and Oriol Vinyals. 2018. Representation learning with contrastive predictive coding. 
arXiv preprint arXiv:1807.03748 (2018)."},{"key":"e_1_3_2_1_38_1","doi-asserted-by":"publisher","DOI":"10.1080\/14786440109462720"},{"key":"e_1_3_2_1_39_1","volume-title":"Graph retrieval-augmented generation: A survey. arXiv preprint arXiv:2408.08921","author":"Peng Boci","year":"2024","unstructured":"Boci Peng, Yun Zhu, Yongchao Liu, Xiaohe Bo, Haizhou Shi, Chuntao Hong, Yan Zhang, and Siliang Tang. 2024. Graph retrieval-augmented generation: A survey. arXiv preprint arXiv:2408.08921 (2024)."},{"key":"e_1_3_2_1_40_1","doi-asserted-by":"publisher","DOI":"10.1162\/tacl_a_00605"},{"key":"e_1_3_2_1_41_1","unstructured":"Vinay Venkatesh Ramasesh Aitor Lewkowycz and Ethan Dyer. 2021. Effect of scale on catastrophic forgetting in neural networks. In ICLR."},{"key":"e_1_3_2_1_42_1","doi-asserted-by":"publisher","DOI":"10.1609\/aimag.v29i3.2157"},{"key":"e_1_3_2_1_43_1","volume-title":"Frontiers of Computer Science","volume":"18","author":"Shi Chuan","year":"2024","unstructured":"Chuan Shi, Junze Chen, Jiawei Liu, and Cheng Yang. 2024. Graph foundation model. Frontiers of Computer Science, Vol. 18, 6 (2024)."},{"key":"e_1_3_2_1_44_1","unstructured":"Junhua Shi Qingyun Sun Haonan Yuan and Xingcheng Fu. 2026. SA2GFM: Enhancing robust graph foundation models with structure-aware semantic augmentation. In AAAI."},{"key":"e_1_3_2_1_45_1","unstructured":"Joshua Southern Yam Eitan Guy Bar-Shalom Michael M Bronstein Haggai Maron and Fabrizio Frasca. 2025. Balancing efficiency and expressiveness: Subgraph GNNs with walk-based centrality. In ICML."},{"key":"e_1_3_2_1_46_1","unstructured":"Fan-Yun Sun Jordan Hoffman Vikas Verma and Jian Tang. 2020. InfoGraph: Unsupervised and semi-supervised graph-level representation learning via mutual information maximization. 
In ICLR."},{"key":"e_1_3_2_1_47_1","first-page":"1717","article-title":"GPPT: Graph pre-training and prompt tuning to generalize graph neural networks","author":"Sun Mingchen","year":"2022","unstructured":"Mingchen Sun, Kaixiong Zhou, Xin He, Ying Wang, and Xin Wang. 2022. GPPT: Graph pre-training and prompt tuning to generalize graph neural networks. In KDD. 1717-1727.","journal-title":"KDD."},{"key":"e_1_3_2_1_48_1","volume-title":"Information-Theoretic Foundations and Advances in Graph Machine Learning: A Comprehensive Survey. Authorea Preprints","author":"Sun Qingyun","year":"2026","unstructured":"Qingyun Sun, Yi Huang, Haonan Yuan, Xingcheng Fu, Yisen Gao, Jia Wu, Shujian Yu, Angsheng Li, Jianxin Li, and Philip S Yu. 2026. Information-Theoretic Foundations and Advances in Graph Machine Learning: A Comprehensive Survey. Authorea Preprints (2026)."},{"key":"e_1_3_2_1_49_1","volume-title":"DyG-RAG: Dynamic graph retrieval-augmented generation with event-centric reasoning. arXiv preprint arXiv:2507.13396","author":"Sun Qingyun","year":"2025","unstructured":"Qingyun Sun, Jiaqi Yuan, Shan He, Xiao Guan, Haonan Yuan, Xingcheng Fu, Jianxin Li, and Philip S Yu. 2025. DyG-RAG: Dynamic graph retrieval-augmented generation with event-centric reasoning. arXiv preprint arXiv:2507.13396 (2025)."},{"key":"e_1_3_2_1_50_1","volume-title":"Dynamics of directed graphs: the world-wide Web. Physica A: Statistical Mechanics and its Applications","author":"Tadi\u0107 Bosiljka","year":"2001","unstructured":"Bosiljka Tadi\u0107. 2001. Dynamics of directed graphs: the world-wide Web. Physica A: Statistical Mechanics and its Applications, Vol. 293, 1-2 (2001), 273-284."},{"key":"e_1_3_2_1_51_1","volume-title":"NeurIPS","volume":"36","author":"Tan Yanchao","year":"2024","unstructured":"Yanchao Tan, Zihao Zhou, Hang Lv, Weiming Liu, and Carl Yang. 2024. WalkLM: A uniform language model fine-tuning framework for attributed graph embedding. NeurIPS, Vol. 
36 (2024)."},{"key":"e_1_3_2_1_52_1","first-page":"2842","article-title":"HiGPT: Heterogeneous graph language model","author":"Tang Jiabin","year":"2024","unstructured":"Jiabin Tang, Yuhao Yang, Wei Wei, Lei Shi, Long Xia, Dawei Yin, and Chao Huang. 2024b. HiGPT: Heterogeneous graph language model. In KDD. 2842-2853.","journal-title":"KDD."},{"key":"e_1_3_2_1_53_1","volume-title":"Cross-domain graph data scaling: a showcase with diffusion models. arXiv preprint arXiv:2406.01899","author":"Tang Wenzhuo","year":"2024","unstructured":"Wenzhuo Tang, Haitao Mao, Danial Dervovic, Ivan Brugere, Saumitra Mishra, Yuying Xie, and Jiliang Tang. 2024a. Cross-domain graph data scaling: a showcase with diffusion models. arXiv preprint arXiv:2406.01899 (2024)."},{"key":"e_1_3_2_1_54_1","volume-title":"Multihop-RAG: Benchmarking retrieval-augmented generation for multi-hop queries. arXiv preprint arXiv:2401.15391","author":"Tang Yixuan","year":"2024","unstructured":"Yixuan Tang and Yi Yang. 2024. Multihop-RAG: Benchmarking retrieval-augmented generation for multi-hop queries. arXiv preprint arXiv:2401.15391 (2024)."},{"key":"e_1_3_2_1_55_1","unstructured":"Qwen Team et al. 2024. Qwen2 technical report. arXiv preprint arXiv:2407.10671 Vol. 2 (2024) 3."},{"key":"e_1_3_2_1_56_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v38i17.29875"},{"key":"e_1_3_2_1_57_1","volume-title":"The information bottleneck method. arXiv preprint physics\/0004057","author":"Tishby Naftali","year":"2000","unstructured":"Naftali Tishby, Fernando C Pereira, and William Bialek. 2000. The information bottleneck method. arXiv preprint physics\/0004057 (2000)."},{"key":"e_1_3_2_1_58_1","doi-asserted-by":"publisher","DOI":"10.1109\/ITW.2015.7133169"},{"key":"e_1_3_2_1_59_1","unstructured":"Petar Velickovic Guillem Cucurull Arantxa Casanova Adriana Romero Pietro Li\u00f2 and Yoshua Bengio. 2018. Graph attention networks. 
In ICLR."},{"key":"e_1_3_2_1_60_1","unstructured":"Petar Velickovic William Fedus William L. Hamilton Pietro Li\u00f2 Yoshua Bengio and R Devon Hjelm. 2019. Deep Graph Infomax. In ICLR."},{"key":"e_1_3_2_1_61_1","volume-title":"NeurIPS","volume":"36","author":"Wang Heng","year":"2024","unstructured":"Heng Wang, Shangbin Feng, Tianxing He, Zhaoxuan Tan, Xiaochuang Han, and Yulia Tsvetkov. 2024. Can language models solve graph problems in natural language? NeurIPS, Vol. 36 (2024)."},{"key":"e_1_3_2_1_62_1","first-page":"968","article-title":"Knowledge-aware graph neural networks with label smoothness regularization for recommender systems","author":"Wang Hongwei","year":"2019","unstructured":"Hongwei Wang, Fuzheng Zhang, Mengdi Zhang, Jure Leskovec, Miao Zhao, Wenjie Li, and Zhongyuan Wang. 2019. Knowledge-aware graph neural networks with label smoothness regularization for recommender systems. In KDD. 968-977.","journal-title":"KDD."},{"key":"e_1_3_2_1_63_1","first-page":"2724","article-title":"Knowledge graph embedding: A survey of approaches and applications","volume":"29","author":"Wang Quan","year":"2017","unstructured":"Quan Wang, Zhendong Mao, Bin Wang, and Li Guo. 2017. Knowledge graph embedding: A survey of approaches and applications. IEEE TKDE, Vol. 29, 12 (2017), 2724-2743.","journal-title":"IEEE TKDE"},{"key":"e_1_3_2_1_64_1","volume-title":"Multi-domain graph foundation models: robust knowledge transfer via topology alignment. ICML","author":"Wang Shuo","year":"2025","unstructured":"Shuo Wang, Bokui Wang, Zhixiang Shen, Boyan Deng, and Zhao Kang. 2025. Multi-domain graph foundation models: robust knowledge transfer via topology alignment. 
ICML (2025)."},{"key":"e_1_3_2_1_65_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v33i01.3301346"},{"key":"e_1_3_2_1_66_1","first-page":"109","article-title":"Graph learning: A survey","volume":"2","author":"Xia Feng","year":"2021","unstructured":"Feng Xia, Ke Sun, Shuo Yu, Abdul Aziz, Liangtian Wan, Shirui Pan, and Huan Liu. 2021. Graph learning: A survey. IEEE TAI, Vol. 2, 2 (2021), 109-127.","journal-title":"IEEE TAI"},{"key":"e_1_3_2_1_67_1","volume-title":"Multimodal-based analysis of single-cell ATAC-seq data enables highly accurate delineation of clinically relevant tumor cell subpopulations. Genome Medicine","author":"Xiong Kewei","year":"2026","unstructured":"Kewei Xiong, Wei Wang, Ruofan Ding, Dinglin Luo, Yangmei Qin, Xudong Zou, Jiguang Wang, Chen Yu, and Lei Li. 2026. Multimodal-based analysis of single-cell ATAC-seq data enables highly accurate delineation of clinically relevant tumor cell subpopulations. Genome Medicine (2026)."},{"key":"e_1_3_2_1_68_1","first-page":"16962","article-title":"Self-supervised heterogeneous graph pre-training based on structural clustering","volume":"35","author":"Yang Yaming","year":"2022","unstructured":"Yaming Yang, Ziyu Guan, Zhe Wang, Wei Zhao, Cai Xu, Weigang Lu, and Jianbin Huang. 2022. Self-supervised heterogeneous graph pre-training based on structural clustering. NeurIPS, Vol. 35 (2022), 16962-16974.","journal-title":"NeurIPS"},{"key":"e_1_3_2_1_69_1","unstructured":"Gustavo Ye. 2024. nano-vectordb. https:\/\/github.com\/gusye1234\/nano-vectordb"},{"key":"e_1_3_2_1_70_1","doi-asserted-by":"publisher","DOI":"10.1145\/3592099"},{"key":"e_1_3_2_1_71_1","first-page":"35277","article-title":"Train once and explain everywhere: Pre-training interpretable graph neural networks","volume":"36","author":"Yin Jun","year":"2023","unstructured":"Jun Yin, Chaozhuo Li, Hao Yan, Jianxun Lian, and Senzhang Wang. 2023. Train once and explain everywhere: Pre-training interpretable graph neural networks. NeurIPS, Vol. 
36 (2023), 35277-35299.","journal-title":"NeurIPS"},{"key":"e_1_3_2_1_72_1","first-page":"5812","article-title":"Graph contrastive learning with augmentations","volume":"33","author":"You Yuning","year":"2020","unstructured":"Yuning You, Tianlong Chen, Yongduo Sui, Ting Chen, Zhangyang Wang, and Yang Shen. 2020. Graph contrastive learning with augmentations. NeurIPS, Vol. 33 (2020), 5812-5823.","journal-title":"NeurIPS"},{"key":"e_1_3_2_1_73_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v38i15.29596"},{"key":"e_1_3_2_1_74_1","first-page":"1142","article-title":"Samgpt: Text-free graph foundation model for multi-domain pre-training and cross-domain adaptation","author":"Yu Xingtong","year":"2025","unstructured":"Xingtong Yu, Zechuan Gong, Chang Zhou, Yuan Fang, and Hui Zhang. 2025. Samgpt: Text-free graph foundation model for multi-domain pre-training and cross-domain adaptation. In WWW. 1142-1153.","journal-title":"WWW."},{"key":"e_1_3_2_1_75_1","volume-title":"Text-free multi-domain graph pre-training: toward graph foundation models. arXiv preprint arXiv:2405.13934","author":"Yu Xingtong","year":"2024","unstructured":"Xingtong Yu, Chang Zhou, Yuan Fang, and Xinming Zhang. 2024b. Text-free multi-domain graph pre-training: toward graph foundation models. arXiv preprint arXiv:2405.13934 (2024)."},{"key":"e_1_3_2_1_76_1","first-page":"49715","article-title":"Environment-aware dynamic graph learning for out-of-distribution generalization","volume":"36","author":"Yuan Haonan","year":"2023","unstructured":"Haonan Yuan, Qingyun Sun, Xingcheng Fu, Ziwei Zhang, Cheng Ji, Hao Peng, and Jianxin Li. 2023. Environment-aware dynamic graph learning for out-of-distribution generalization. NeurIPS, Vol. 36 (2023), 49715-49747.","journal-title":"NeurIPS"},{"key":"e_1_3_2_1_77_1","volume-title":"GRAVER: Generative graph vocabularies for robust graph foundation models fine-tuning. 
In NeurIPS.","author":"Yuan Haonan","year":"2025","unstructured":"Haonan Yuan, Qingyun Sun, Junhua Shi, Xingcheng Fu, Bryan Hooi, Jianxin Li, and Philip S Yu. 2025a. GRAVER: Generative graph vocabularies for robust graph foundation models fine-tuning. In NeurIPS."},{"key":"e_1_3_2_1_78_1","unstructured":"Haonan Yuan Qingyun Sun Junhua Shi Xingcheng Fu Bryan Hooi Jianxin Li and Philip S Yu. 2025b. How much can transfer? BRIDGE: Bounded multi-domain graph foundation model with generalization guarantees. In ICML."},{"key":"e_1_3_2_1_79_1","first-page":"912","article-title":"Knowledge graph reasoning with relational digraph","author":"Zhang Yongqi","year":"2022","unstructured":"Yongqi Zhang and Quanming Yao. 2022. Knowledge graph reasoning with relational digraph. In WWW. 912-924.","journal-title":"WWW."},{"key":"e_1_3_2_1_80_1","first-page":"15870","article-title":"Motif-based graph self-supervised learning for molecular property prediction","volume":"34","author":"Zhang Zaixi","year":"2021","unstructured":"Zaixi Zhang, Qi Liu, Hao Wang, Chengqiang Lu, and Chee-Kong Lee. 2021. Motif-based graph self-supervised learning for molecular property prediction. NeurIPS, Vol. 34 (2021), 15870-15882.","journal-title":"NeurIPS"},{"key":"e_1_3_2_1_81_1","first-page":"4443","article-title":"All in one and one for all: A simple yet effective method towards cross-domain graph pretraining","author":"Zhao Haihong","year":"2024","unstructured":"Haihong Zhao, Aochuan Chen, Xiangguo Sun, Hong Cheng, and Jia Li. 2024a. All in one and one for all: A simple yet effective method towards cross-domain graph pretraining. In KDD. 4443-4454.","journal-title":"KDD."},{"key":"e_1_3_2_1_82_1","first-page":"28631","article-title":"Continual forgetting for pre-trained vision models","author":"Zhao Hongbo","year":"2024","unstructured":"Hongbo Zhao, Bolin Ni, Junsong Fan, Yuxi Wang, Yuntao Chen, Gaofeng Meng, and Zhaoxiang Zhang. 2024b. Continual forgetting for pre-trained vision models. In CVPR. 
28631-28642.","journal-title":"CVPR."},{"key":"e_1_3_2_1_83_1","first-page":"2183","article-title":"GraphCLIP: Enhancing transferability in graph foundation models for text-attributed graphs","author":"Zhu Yun","year":"2025","unstructured":"Yun Zhu, Haizhou Shi, Xiaotang Wang, Yongchao Liu, Yaoke Wang, Boci Peng, Chuntao Hong, and Siliang Tang. 2025. GraphCLIP: Enhancing transferability in graph foundation models for text-attributed graphs. In WWW. 2183-2197.","journal-title":"WWW."}],"event":{"name":"WWW '26: The ACM Web Conference 2026","location":"Dubai United Arab Emirates","sponsor":["SIGWEB ACM Special Interest Group on Hypertext, Hypermedia, and Web"]},"container-title":["Proceedings of the ACM Web Conference 2026"],"original-title":[],"deposited":{"date-parts":[[2026,4,10]],"date-time":"2026-04-10T16:27:00Z","timestamp":1775838420000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3774904.3792139"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2026,4,12]]},"references-count":83,"alternative-id":["10.1145\/3774904.3792139","10.1145\/3774904"],"URL":"https:\/\/doi.org\/10.1145\/3774904.3792139","relation":{},"subject":[],"published":{"date-parts":[[2026,4,12]]},"assertion":[{"value":"2026-04-12","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}