{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,14]],"date-time":"2026-04-14T07:37:54Z","timestamp":1776152274230,"version":"3.50.1"},"reference-count":210,"publisher":"Elsevier BV","license":[{"start":{"date-parts":[[2026,6,1]],"date-time":"2026-06-01T00:00:00Z","timestamp":1780272000000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/tdm\/userlicense\/1.0\/"},{"start":{"date-parts":[[2026,6,1]],"date-time":"2026-06-01T00:00:00Z","timestamp":1780272000000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/legal\/tdmrep-license"},{"start":{"date-parts":[[2026,1,9]],"date-time":"2026-01-09T00:00:00Z","timestamp":1767916800000},"content-version":"vor","delay-in-days":0,"URL":"http:\/\/creativecommons.org\/licenses\/by\/4.0\/"}],"content-domain":{"domain":["elsevier.com","sciencedirect.com"],"crossmark-restriction":true},"short-container-title":["Neural Networks"],"published-print":{"date-parts":[[2026,6]]},"DOI":"10.1016\/j.neunet.2026.108567","type":"journal-article","created":{"date-parts":[[2026,1,11]],"date-time":"2026-01-11T15:11:09Z","timestamp":1768144269000},"page":"108567","update-policy":"https:\/\/doi.org\/10.1016\/elsevier_cm_policy","source":"Crossref","is-referenced-by-count":3,"special_numbering":"C","title":["On scientific foundation models: Rigorous definitions, key applications, and a comprehensive survey"],"prefix":"10.1016","volume":"198","author":[{"given":"Sidharth S.","family":"Menon","sequence":"first","affiliation":[]},{"given":"Trishit","family":"Mondal","sequence":"additional","affiliation":[]},{"given":"Shuvayan","family":"Brahmachary","sequence":"additional","affiliation":[]},{"given":"Aniruddha","family":"Panda","sequence":"additional","affiliation":[]},{"given":"Subodh 
M.","family":"Joshi","sequence":"additional","affiliation":[]},{"given":"Kaushic","family":"Kalyanaraman","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0002-8831-1000","authenticated-orcid":false,"given":"Ameya D.","family":"Jagtap","sequence":"additional","affiliation":[]}],"member":"78","reference":[{"key":"10.1016\/j.neunet.2026.108567_bib0001","doi-asserted-by":"crossref","unstructured":"Abbasi, J., Jagtap, A. D., Moseley, B., Hiorth, A., & Andersen, P. \u00d8. (2025a). Challenges and advancements in modeling shock fronts with physics-informed neural networks: A review and benchmarking study. Neurocomputing, 657, 131440.","DOI":"10.1016\/j.neucom.2025.131440"},{"key":"10.1016\/j.neunet.2026.108567_bib0002","doi-asserted-by":"crossref","DOI":"10.1016\/j.cma.2025.117784","article-title":"History-matching of imbibition flow in fractured porous media using physics-informed neural networks (PINNS)","volume":"437","author":"Abbasi","year":"2025","journal-title":"Computer Methods in Applied Mechanics and Engineering"},{"issue":"3","key":"10.1016\/j.neunet.2026.108567_bib0003","doi-asserted-by":"crossref","first-page":"250","DOI":"10.1016\/S0168-9002(03)01368-8","article-title":"Geant4-a simulation toolkit","volume":"506","author":"Agostinelli","year":"2003","journal-title":"Nuclear Instruments and Methods in Physics Research Section A: Accelerators, Spectrometers, Detectors and Associated Equipment"},{"key":"10.1016\/j.neunet.2026.108567_bib0004","unstructured":"Ahmad, W., Simon, E., Chithrananda, S., Grand, G., & Ramsundar, B. (2022). Chemberta-2: Towards chemical foundation models. 
arXiv preprint arXiv: 2209.01712."},{"key":"10.1016\/j.neunet.2026.108567_bib0005","doi-asserted-by":"crossref","first-page":"25152","DOI":"10.52202\/079017-0793","article-title":"Universal physics transformers: A framework for efficiently scaling neural operators","volume":"37","author":"Alkin","year":"2024","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0006","unstructured":"Allal, L. B., Lozhkov, A., Bakouch, E., Bl\u00e1zquez, G. M., Penedo, G., Tunstall, L., Marafioti, A., Kydl\u00ed\u010dek, H., Lajar\u00edn, A. P., Srivastav, V. et al. (2025). SmolLM2: When smol goes big\u2013data-centric training of a small language model. arXiv preprint arXiv: 2502.02737."},{"key":"10.1016\/j.neunet.2026.108567_bib0007","unstructured":"Almeldein, A., Alnaggar, M., Archibald, R., Beck, T., Biswas, A., Bostelmann, R., Brewer, W., Bryan, C., Calle, C., Celik, C. et al. (2025). Exploring the capabilities of the frontier large language models for nuclear energy research. arXiv preprint arXiv: 2506.19863."},{"key":"10.1016\/j.neunet.2026.108567_bib0008","unstructured":"Andrychowicz, M., Espeholt, L., Li, D., Merchant, S., Merose, A., Zyda, F., Agrawal, S., & Kalchbrenner, N. (2023). Deep learning for day forecasts from sparse observations. arXiv preprint arXiv: 2306.06079."},{"key":"10.1016\/j.neunet.2026.108567_bib0009","unstructured":"Ansari, A. F., Stella, L., Turkmen, C., Zhang, X., Mercado, P., Shen, H., Shchur, O., Rangapuram, S. S., Arango, S. P., Kapoor, S. et al. (2024). Chronos: Learning the language of time series. 
arXiv preprint arXiv: 2403.07815."},{"key":"10.1016\/j.neunet.2026.108567_bib0010","first-page":"12449","article-title":"Wav2vec 2.0: A framework for self-supervised learning of speech representations","volume":"33","author":"Baevski","year":"2020","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0011","unstructured":"Bi, K., Xie, L., Zhang, H., Chen, X., Gu, X., & Tian, Q. (2022). Pangu-weather: A 3D high-resolution model for fast and accurate global weather forecast. arXiv preprint arXiv: 2211.02556."},{"key":"10.1016\/j.neunet.2026.108567_bib0012","unstructured":"Bjorck, J., Casta\u00f1eda, F., Cherniadev, N., Da, X., Ding, R., Fan, L., Fang, Y., Fox, D., Hu, F., Huang, S. et al. (2025). Gr00t n1: An open foundation model for generalist humanoid robots. arXiv preprint arXiv: 2503.14734."},{"key":"10.1016\/j.neunet.2026.108567_bib0013","unstructured":"Blumenstiel, B., Braham, N. A. A., Albrecht, C. M., Maurogiovanni, S., & Fraccaro, P. (2025). Ssl4eo-s12 v1. 1: A multimodal, multiseasonal dataset for pretraining, updated. arXiv preprint arXiv: 2503.00168."},{"issue":"8065","key":"10.1016\/j.neunet.2026.108567_bib0014","doi-asserted-by":"crossref","first-page":"1180","DOI":"10.1038\/s41586-025-09005-y","article-title":"A foundation model for the earth system","volume":"641","author":"Bodnar","year":"2025","journal-title":"Nature"},{"key":"10.1016\/j.neunet.2026.108567_bib0015","unstructured":"Bodnar, C., Bruinsma, W. P., Lucic, A., Stanley, M., Brandstetter, J., Garvan, P., Riechert, M., Weyn, J., Dong, H., Vaughan, A. et al. (2024). Aurora: A foundation model of the atmosphere. arXiv preprint arXiv: 2405.13063."},{"key":"10.1016\/j.neunet.2026.108567_bib0016","unstructured":"Bommasani, R., Hudson, D. A., Adeli, E., Altman, R., Arora, S., von Arx, S., Bernstein, M. S., Bohg, J., Bosselut, A., Brunskill, E. et al. (2021). On the opportunities and risks of foundation models. 
arXiv preprint arXiv: 2108.07258."},{"key":"10.1016\/j.neunet.2026.108567_bib0017","doi-asserted-by":"crossref","DOI":"10.1016\/j.neucom.2024.129272","article-title":"Large language model-based evolutionary optimizer: Reasoning with elitism","volume":"622","author":"Brahmachary","year":"2025","journal-title":"Neurocomputing"},{"key":"10.1016\/j.neunet.2026.108567_bib0018","doi-asserted-by":"crossref","unstructured":"Brohan, A., Brown, N., Carbajal, J., Chebotar, Y., Dabis, J., Finn, C., Gopalakrishnan, K., Hausman, K., Herzog, A., Hsu, J. et al. (2022). Rt-1: Robotics transformer for real-world control at scale. arXiv preprint arXiv: 2212.06817.","DOI":"10.15607\/RSS.2023.XIX.025"},{"key":"10.1016\/j.neunet.2026.108567_bib0019","first-page":"1877","article-title":"Language models are few-shot learners","volume":"33","author":"Brown","year":"2020","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0020","unstructured":"Bubeck, S., Chadrasekaran, V., Eldan, R., Gehrke, J., Horvitz, E., Kamar, E., Lee, P., Lee, Y. T., Li, Y., Lundberg, S. et al. (2023). Sparks of artificial general intelligence: Early experiments with gpt-4."},{"key":"10.1016\/j.neunet.2026.108567_bib0021","unstructured":"Cao, Y., Liu, Y., Yang, L., Yu, R., Schaeffer, H., & Osher, S. (2024). Vicon: Vision in-context operator networks for multi-physics fluid dynamics prediction. arXiv preprint arXiv: 2411.16063."},{"key":"10.1016\/j.neunet.2026.108567_bib0022","unstructured":"Child, R., Gray, S., Radford, A., & Sutskever, I. (2019). Generating long sequences with sparse transformers. arXiv preprint arXiv: 1904.10509."},{"key":"10.1016\/j.neunet.2026.108567_bib0023","unstructured":"Chithrananda, S., Grand, G., & Ramsundar, B. (2020). ChemBERTa: Large-scale self-supervised pretraining for molecular property prediction. 
arXiv preprint arXiv: 2010.09885."},{"key":"10.1016\/j.neunet.2026.108567_bib0024","doi-asserted-by":"crossref","unstructured":"Choi, J., Nam, G., Choi, J., & Jung, Y. (2025a). A perspective on foundation models in chemistry. JACS Au, 5(4), 1499\u20131518. arXiv preprint arXiv: 2505.22904.","DOI":"10.1021\/jacsau.4c01160"},{"key":"10.1016\/j.neunet.2026.108567_bib0025","unstructured":"Choi, Y., Cheung, S. W., Kim, Y., Tsai, P.-H., Diaz, A. N., Zanardi, I., Chung, S. W., Copeland, D. M., Kendrick, C., Anderson, W. et al. (2025b). Defining foundation models for computational science: A call for clarity and rigor. arXiv preprint arXiv: 2505.22904."},{"key":"10.1016\/j.neunet.2026.108567_bib0026","doi-asserted-by":"crossref","first-page":"145","DOI":"10.1016\/j.rse.2018.09.002","article-title":"The harmonized landsat and sentinel-2 surface reflectance data set","volume":"219","author":"Claverie","year":"2018","journal-title":"Remote Sensing of Environment"},{"issue":"5","key":"10.1016\/j.neunet.2026.108567_bib0027","doi-asserted-by":"crossref","first-page":"2489","DOI":"10.1785\/0220230021","article-title":"Mlaapde: A machine learning dataset for determining global earthquake source parameters","volume":"94","author":"Cole","year":"2023","journal-title":"Seismological Research Letters"},{"key":"10.1016\/j.neunet.2026.108567_bib0028","first-page":"197","article-title":"Satmae: Pre-training transformers for temporal and multi-spectral satellite imagery","volume":"35","author":"Cong","year":"2022","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0029","doi-asserted-by":"crossref","first-page":"16344","DOI":"10.52202\/068431-1189","article-title":"Flashattention: Fast and memory-efficient exact attention with IO-awareness","volume":"35","author":"Dao","year":"2022","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0030","unstructured":"Dao, T., & Gu, A. 
(2024). Transformers are SSMs: Generalized models and efficient algorithms through structured state space duality. arXiv preprint arXiv: 2405.21060."},{"key":"10.1016\/j.neunet.2026.108567_bib0031","unstructured":"Das, A., Kong, W., Leach, A., Mathur, S., Rajat, S., & Yu, R. (2023). Long-term forecasting with tide: Time-series dense encoder. arXiv preprint arXiv: 2304.08424."},{"key":"10.1016\/j.neunet.2026.108567_bib0032","series-title":"Forty-first international conference on machine learning","article-title":"A decoder-only foundation model for time-series forecasting","author":"Das","year":"2024"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0033","doi-asserted-by":"crossref","first-page":"83","DOI":"10.1093\/imanum\/drac085","article-title":"Error estimates for physics-informed neural networks approximating the Navier\u2013Stokes equations","volume":"44","author":"De Ryck","year":"2024","journal-title":"IMA Journal of Numerical Analysis"},{"key":"10.1016\/j.neunet.2026.108567_bib0034","series-title":"Proceedings of the 17th ACM international conference on web search and data mining","first-page":"161","article-title":"K2: A foundation language model for geoscience knowledge understanding and utilization","author":"Deng","year":"2024"},{"key":"10.1016\/j.neunet.2026.108567_bib0035","series-title":"Proceedings of the 2019 conference of the North American chapter of the association for computational linguistics: Human language technologies, volume 1 (long and short papers)","first-page":"4171","article-title":"Bert: Pre-training of deep bidirectional transformers for language understanding","author":"Devlin","year":"2019"},{"key":"10.1016\/j.neunet.2026.108567_bib0036","unstructured":"Dong, X., Yu, W., Lin, J., Guo, Z., Wang, H., & Yang, J. (2025). Light-weighted foundation model for seismic data processing based on representative and non-redundant pre-training dataset. 
arXiv preprint arXiv: 2503.10092."},{"key":"10.1016\/j.neunet.2026.108567_bib0037","unstructured":"Dosovitskiy, A., Beyer, L., Kolesnikov, A., Weissenborn, D., Zhai, X., Unterthiner, T., Dehghani, M., Minderer, M., Heigold, G., Gelly, S. et al. (2020). An image is worth 16x16 words: Transformers for image recognition at scale. arXiv preprint arXiv: 2010.11929."},{"key":"10.1016\/j.neunet.2026.108567_bib0038","doi-asserted-by":"crossref","first-page":"25","DOI":"10.1016\/j.rse.2011.11.026","article-title":"Sentinel-2: ESA\u2019s optical high-resolution mission for GMES operational services","volume":"120","author":"Drusch","year":"2012","journal-title":"Remote Sensing of Environment"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0039","doi-asserted-by":"crossref","first-page":"138","DOI":"10.1038\/s41524-020-00406-3","article-title":"Benchmarking materials property prediction methods: the matbench test set and automatminer reference algorithm","volume":"6","author":"Dunn","year":"2020","journal-title":"NPJ Computational Materials"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0040","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1038\/s41467-022-32483-x","article-title":"Deep learning for twelve hour precipitation forecasts","volume":"13","author":"Espeholt","year":"2022","journal-title":"Nature Communications"},{"key":"10.1016\/j.neunet.2026.108567_bib0041","series-title":"KDD","first-page":"226","article-title":"A density-based algorithm for discovering clusters in large spatial databases with noise","volume":"vol. 
96","author":"Ester","year":"1996"},{"issue":"5","key":"10.1016\/j.neunet.2026.108567_bib0042","doi-asserted-by":"crossref","first-page":"1937","DOI":"10.5194\/gmd-9-1937-2016","article-title":"Overview of the coupled model intercomparison project phase 6 (CMIP6) experimental design and organization","volume":"9","author":"Eyring","year":"2016","journal-title":"Geoscientific Model Development"},{"key":"10.1016\/j.neunet.2026.108567_bib0043","doi-asserted-by":"crossref","DOI":"10.1109\/TKDE.2025.3555328","article-title":"Ten challenging problems in federated foundation models","author":"Fan","year":"2025","journal-title":"IEEE Transactions on Knowledge and Data Engineering"},{"issue":"5","key":"10.1016\/j.neunet.2026.108567_bib0044","doi-asserted-by":"crossref","first-page":"701","DOI":"10.1177\/02783649241281508","article-title":"Foundation models in robotics: Applications, challenges, and the future","volume":"44","author":"Firoozi","year":"2025","journal-title":"The International Journal of Robotics Research"},{"key":"10.1016\/j.neunet.2026.108567_bib0045","doi-asserted-by":"crossref","DOI":"10.1016\/j.parco.2022.102982","article-title":"NekRS, a GPU-accelerated spectral element navier\u2013stokes solver","volume":"114","author":"Fischer","year":"2022","journal-title":"Parallel Computing"},{"key":"10.1016\/j.neunet.2026.108567_bib0046","series-title":"IGARSS 2024-2024 IEEE international geoscience and remote sensing symposium","first-page":"2935","article-title":"Major tom: Expandable datasets for earth observation","author":"Francis","year":"2024"},{"key":"10.1016\/j.neunet.2026.108567_bib0047","doi-asserted-by":"crossref","first-page":"5506","DOI":"10.52202\/075280-0241","article-title":"Croma: Remote sensing representations with contrastive radar-optical masked autoencoders","volume":"36","author":"Fuller","year":"2023","journal-title":"Advances in Neural Information Processing 
Systems"},{"issue":"14","key":"10.1016\/j.neunet.2026.108567_bib0048","doi-asserted-by":"crossref","first-page":"5419","DOI":"10.1175\/JCLI-D-16-0758.1","article-title":"The modern-era retrospective analysis for research and applications, version 2 (MERRA-2)","volume":"30","author":"Gelaro","year":"2017","journal-title":"Journal of Climate"},{"key":"10.1016\/j.neunet.2026.108567_bib0049","unstructured":"Gilpin, W. (2021). Chaos as an interpretable benchmark for forecasting and data-driven modelling. arXiv preprint arXiv: 2110.05266."},{"key":"10.1016\/j.neunet.2026.108567_bib0050","unstructured":"Giroux, J., & Fanelli, C. (2025). Towards foundation models for experimental readout systems combining discrete and continuous data. arXiv preprint arXiv: 2505.08736."},{"key":"10.1016\/j.neunet.2026.108567_bib0051","doi-asserted-by":"crossref","DOI":"10.1016\/j.cma.2023.116674","article-title":"Learning stiff chemical kinetics using extended deep neural operators","volume":"419","author":"Goswami","year":"2024","journal-title":"Computer Methods in Applied Mechanics and Engineering"},{"key":"10.1016\/j.neunet.2026.108567_bib0052","unstructured":"Guo, D., Yang, D., Zhang, H., Song, J., Zhang, R., Xu, R., Zhu, Q., Ma, S., Wang, P., Bi, X. et al. (2025a). Deepseek-r1: Incentivizing reasoning capability in LLMs via reinforcement learning. arXiv preprint arXiv: 2506.12948."},{"key":"10.1016\/j.neunet.2026.108567_bib0053","unstructured":"Guo, H., Yang, S., Goel, T., Xing, E. P., Dao, T., & Kim, Y. (2025b). Log-linear attention. 
arXiv preprint arXiv: 2506.04761."},{"issue":"10","key":"10.1016\/j.neunet.2026.108567_bib0054","doi-asserted-by":"crossref","DOI":"10.1111\/exsy.13654","article-title":"When geoscience meets generative AI and large language models: Foundations, trends, and future challenges","volume":"41","author":"Hadid","year":"2024","journal-title":"Expert Systems"},{"issue":"8","key":"10.1016\/j.neunet.2026.108567_bib0055","doi-asserted-by":"crossref","first-page":"1481","DOI":"10.1038\/s41592-024-02305-7","article-title":"Large-scale foundation model on single-cell transcriptomics","volume":"21","author":"Hao","year":"2024","journal-title":"Nature Methods"},{"key":"10.1016\/j.neunet.2026.108567_bib0056","unstructured":"He, P., Liu, X., Gao, J., & Chen, W. (2020). Deberta: Decoding-enhanced bert with disentangled attention. arXiv preprint arXiv: 2006.03654."},{"key":"10.1016\/j.neunet.2026.108567_bib0057","first-page":"72525","article-title":"Poseidon: Efficient foundation models for PDEs","volume":"37","author":"Herde","year":"2024","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0058","article-title":"Era5 hourly data on single levels from 1940 to present","volume":"10","author":"Hersbach","year":"2023","journal-title":"Copernicus Climate Change Service (c3s) Climate Data Store (CDS)"},{"issue":"730","key":"10.1016\/j.neunet.2026.108567_bib0059","doi-asserted-by":"crossref","first-page":"1999","DOI":"10.1002\/qj.3803","article-title":"The ERA5 global reanalysis","volume":"146","author":"Hersbach","year":"2020","journal-title":"Quarterly Journal of the Royal Meteorological Society"},{"key":"10.1016\/j.neunet.2026.108567_bib0060","first-page":"6840","article-title":"Denoising diffusion probabilistic models","volume":"33","author":"Ho","year":"2020","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0061","unstructured":"Ho, J., Kalchbrenner, N., Weissenborn, 
D., & Salimans, T. (2019). Axial attention in multidimensional transformers. arXiv preprint arXiv: 1912.12180."},{"key":"10.1016\/j.neunet.2026.108567_bib0062","unstructured":"Hong, D., Zhang, B., Li, X., Li, Y., Li, C., Yao, J., Yokoya, N., Li, H., Ghamisi, P., Jia, X. et al. (2023). SpectralGPT: Spectral remote sensing foundation model. arXiv preprint arXiv: 2311.07113."},{"key":"10.1016\/j.neunet.2026.108567_bib0063","unstructured":"Hu, J., Guo, D., Si, Z., Liu, D., Diao, Y., Zhang, J., Zhou, J., & Wang, M. (2025). Mol-Mamba: Enhancing molecular representation with structural and electronic insights. arXiv preprint arXiv: 2504.11171."},{"key":"10.1016\/j.neunet.2026.108567_bib0064","doi-asserted-by":"crossref","unstructured":"Hu, Z., Jagtap, A. D., Karniadakis, G. E., & Kawaguchi, K. (2022). When do extended physics-informed neural networks (XPINNs) improve generalization? SIAM Journal on Scientific Computing, 44, 5, A3158-A3182.","DOI":"10.1137\/21M1447039"},{"key":"10.1016\/j.neunet.2026.108567_bib0065","doi-asserted-by":"crossref","DOI":"10.1016\/j.engappai.2023.107183","article-title":"Augmented physics-informed neural networks (APINNs): A gating network-based soft domain decomposition methodology","volume":"126","author":"Hu","year":"2023","journal-title":"Engineering Applications of Artificial Intelligence"},{"issue":"12","key":"10.1016\/j.neunet.2026.108567_bib0066","doi-asserted-by":"crossref","first-page":"3601","DOI":"10.1038\/s41591-024-03233-x","article-title":"A foundation model for clinician-centered drug repurposing","volume":"30","author":"Huang","year":"2024","journal-title":"Nature Medicine"},{"issue":"5","key":"10.1016\/j.neunet.2026.108567_bib0067","doi-asserted-by":"crossref","DOI":"10.4208\/cicp.OA-2020-0164","article-title":"Extended physics-informed neural networks (XPINNs): A generalized space-time domain decomposition based deep learning framework for nonlinear partial differential 
equations","volume":"28","author":"Jagtap","year":"2020","journal-title":"Communications in Computational Physics"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0068","doi-asserted-by":"crossref","DOI":"10.1615\/JMachLearnModelComput.2023047367","article-title":"How important are activation functions in regression and classification? a survey, performance comparison, and future directions","volume":"4","author":"Jagtap","year":"2023","journal-title":"Journal of Machine Learning for Modeling and Computing"},{"issue":"2239","key":"10.1016\/j.neunet.2026.108567_bib0069","article-title":"Locally adaptive activation functions with slope recovery for deep and physics-informed neural networks","volume":"476","author":"Jagtap","year":"2020","journal-title":"Proceedings of the Royal Society A"},{"key":"10.1016\/j.neunet.2026.108567_bib0070","doi-asserted-by":"crossref","DOI":"10.1016\/j.jcp.2019.109136","article-title":"Adaptive activation functions accelerate convergence in deep and physics-informed neural networks","volume":"404","author":"Jagtap","year":"2020","journal-title":"Journal of Computational Physics"},{"key":"10.1016\/j.neunet.2026.108567_bib0071","doi-asserted-by":"crossref","DOI":"10.1016\/j.cma.2020.113028","article-title":"Conservative physics-informed neural networks on discrete domains for conservation laws: Applications to forward and inverse problems","volume":"365","author":"Jagtap","year":"2020","journal-title":"Computer Methods in Applied Mechanics and Engineering"},{"key":"10.1016\/j.neunet.2026.108567_bib0072","doi-asserted-by":"crossref","DOI":"10.1016\/j.jcp.2022.111402","article-title":"Physics-informed neural networks for inverse problems in supersonic flows","volume":"466","author":"Jagtap","year":"2022","journal-title":"Journal of Computational Physics"},{"key":"10.1016\/j.neunet.2026.108567_bib0073","doi-asserted-by":"crossref","DOI":"10.1016\/j.oceaneng.2022.110775","article-title":"Deep learning of inverse water waves problems 
using multi-fidelity data: Application to serre\u2013green\u2013naghdi equations","volume":"248","author":"Jagtap","year":"2022","journal-title":"Ocean Engineering"},{"key":"10.1016\/j.neunet.2026.108567_bib0074","doi-asserted-by":"crossref","first-page":"165","DOI":"10.1016\/j.neucom.2021.10.036","article-title":"Deep kronecker neural networks: A general framework for neural networks with adaptive activation functions","volume":"468","author":"Jagtap","year":"2022","journal-title":"Neurocomputing"},{"key":"10.1016\/j.neunet.2026.108567_bib0075","doi-asserted-by":"crossref","unstructured":"Jakubik, J., Roy, S., Phillips, C. E., Fraccaro, P., Godwin, D., Zadrozny, B., Szwarcman, D., Gomes, C., Nyirjesy, G., Edwards, B. et al. (2023). Foundation models for generalist geospatial artificial intelligence. arXiv preprint arXiv: 2310.18660.","DOI":"10.2139\/ssrn.4804009"},{"key":"10.1016\/j.neunet.2026.108567_bib0076","unstructured":"Jakubik, J., Yang, F., Blumenstiel, B., Scheurer, E., Sedona, R., Maurogiovanni, S., Bosmans, J., Dionelis, N., Marsocci, V., Kopp, N. et al. (2025). Terramind: Large-scale generative multimodality for earth observation. arXiv preprint arXiv: 2504.11171."},{"key":"10.1016\/j.neunet.2026.108567_bib0077","series-title":"Conference on robot learning","first-page":"991","article-title":"Bc-z: Zero-shot task generalization with robotic imitation learning","author":"Jang","year":"2022"},{"key":"10.1016\/j.neunet.2026.108567_bib0078","unstructured":"Kajino, H. (2019). Molecular hypergraph grammar with its application to molecular optimization. 
arXiv preprint arXiv: 2001.04451."},{"issue":"11","key":"10.1016\/j.neunet.2026.108567_bib0079","doi-asserted-by":"crossref","DOI":"10.1088\/1748-0221\/15\/11\/C11006","article-title":"Developing high-performance DIRC detector for the future electron ion collider experiment","volume":"15","author":"Kalicy","year":"2020","journal-title":"Journal of Instrumentation"},{"issue":"D1","key":"10.1016\/j.neunet.2026.108567_bib0080","doi-asserted-by":"crossref","first-page":"D1516","DOI":"10.1093\/nar\/gkae1059","article-title":"Pubchem 2025 update","volume":"53","author":"Kim","year":"2025","journal-title":"Nucleic Acids Research"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0081","doi-asserted-by":"crossref","first-page":"387","DOI":"10.1038\/s41597-020-00723-8","article-title":"A band-gap database for semiconducting inorganic materials calculated with hybrid functional","volume":"7","author":"Kim","year":"2020","journal-title":"Scientific Data"},{"key":"10.1016\/j.neunet.2026.108567_bib0082","series-title":"AI for accelerated materials design - NeurIPS 2023 workshop","article-title":"MHG-GNN: Combination of molecular hypergraph grammar with graph neural network","author":"Kishimoto","year":"2023"},{"key":"10.1016\/j.neunet.2026.108567_bib0083","unstructured":"Kitaev, N., Kaiser, \u0141., & Levskaya, A. (2020). Reformer: The efficient transformer. 
arXiv preprint arXiv: 2001.04451."},{"issue":"21","key":"10.1016\/j.neunet.2026.108567_bib0084","doi-asserted-by":"crossref","DOI":"10.1073\/pnas.2101784118","article-title":"Machine learning\u2013accelerated computational fluid dynamics","volume":"118","author":"Kochkov","year":"2021","journal-title":"Proceedings of the National Academy of Sciences"},{"issue":"8027","key":"10.1016\/j.neunet.2026.108567_bib0085","doi-asserted-by":"crossref","first-page":"1060","DOI":"10.1038\/s41586-024-07744-y","article-title":"Neural general circulation models for weather and climate","volume":"632","author":"Kochkov","year":"2024","journal-title":"Nature"},{"issue":"4","key":"10.1016\/j.neunet.2026.108567_bib0086","article-title":"Self-referencing embedded strings (SELFIES): A 100% robust molecular string representation","volume":"1","author":"Krenn","year":"2020","journal-title":"Machine Learning: Science and Technology"},{"key":"10.1016\/j.neunet.2026.108567_bib0087","unstructured":"Ku, S.-H., Hager, R., Scheinberg, A., Dominski, J., Sharma, A., Churchill, M., Choi, J., Sturdevant, B., Moll\u00e9n, A., Wilkie, G., Chang, C.-S., Yoon, E., Adams, M., Seo, J., Koh, S., D\u2019Azevedo, E., Abbott, S., Worley, P. H., Ethier, S., Park, G., Lang, J., MacKie-Mason, B., Germaschewski, K., Suchyta, E., Carey, V., Cole, M., Trivedi, P., & Chowdhury, J., et al. (2018). Xgc. [Computer Software] https:\/\/doi.org\/10.11578\/dc.20180627.11. 10.11578\/dc.20180627.11."},{"key":"10.1016\/j.neunet.2026.108567_bib0088","first-page":"51080","article-title":"Geo-bench: Toward foundation models for earth monitoring","volume":"36","author":"Lacoste","year":"2023","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0089","unstructured":"Lai, J., Bao, A., & Gilpin, W. (2025). Panda: A pretrained forecast model for universal representation of chaotic dynamics. 
arXiv preprint arXiv: 2505.13755."},{"issue":"6677","key":"10.1016\/j.neunet.2026.108567_bib0090","doi-asserted-by":"crossref","first-page":"1416","DOI":"10.1126\/science.adi2336","article-title":"Learning skillful medium-range global weather forecasting","volume":"382","author":"Lam","year":"2023","journal-title":"Science"},{"key":"10.1016\/j.neunet.2026.108567_bib0091","unstructured":"Lehmann, F., Ozdemir, F., Soja, B., Hoefler, T., Mishra, S., & Schemm, S. (2025). Finetuning a weather foundation model with lightweight decoders for unseen physical processes. arXiv preprint arXiv: 2506.19088."},{"key":"10.1016\/j.neunet.2026.108567_bib0092","article-title":"Seist: A foundational deep learning model for earthquake monitoring tasks","author":"Li","year":"2024","journal-title":"IEEE Transactions on Geoscience and Remote Sensing"},{"key":"10.1016\/j.neunet.2026.108567_bib0093","unstructured":"Li, Z., Chen, G., Liu, S., Wang, S., VS, V., Ji, Y., Lan, S., Zhang, H., Zhao, Y., Radhakrishnan, S. et al. (2025). Eagle 2: Building post-training data strategies from scratch for frontier vision-language models. arXiv preprint arXiv: 2501.14818."},{"key":"10.1016\/j.neunet.2026.108567_bib0094","unstructured":"Li, Z., Kovachki, N., Azizzadenesheli, K., Liu, B., Bhattacharya, K., Stuart, A., & Anandkumar, A. (2020). Fourier neural operator for parametric partial differential equations. 
arXiv preprint arXiv: 2010.08895."},{"key":"10.1016\/j.neunet.2026.108567_bib0095","series-title":"Proceedings of the 30th ACM SIGKDD conference on knowledge discovery and data mining","first-page":"6555","article-title":"Foundation models for time series analysis: A tutorial and survey","author":"Liang","year":"2024"},{"key":"10.1016\/j.neunet.2026.108567_bib0096","doi-asserted-by":"crossref","DOI":"10.1016\/j.cpc.2022.108313","article-title":"Hybrid simulation of energetic particles interacting with magnetohydrodynamics using a slow manifold algorithm and GPU acceleration","volume":"275","author":"Liu","year":"2022","journal-title":"Computer Physics Communications"},{"key":"10.1016\/j.neunet.2026.108567_bib0097","series-title":"The symbiosis of deep learning and differential equations III","article-title":"Does in-context operator learning generalize to domain-shifted settings?","author":"Liu","year":"2023"},{"key":"10.1016\/j.neunet.2026.108567_bib0098","unstructured":"Liu, Q., & Ma, J. (2024). Foundation models for geophysics: Review and perspective. arXiv preprint arXiv: 2406.03163."},{"key":"10.1016\/j.neunet.2026.108567_bib0099","unstructured":"Liu, T., M\u00fcnchmeyer, J., Laurenti, L., Marone, C., de Hoop, M. V., & Dokmani\u0107, I. (2024a). SeisLM: a foundation model for seismic waveforms. arXiv preprint arXiv: 2410.15765."},{"key":"10.1016\/j.neunet.2026.108567_bib0100","unstructured":"Liu, Y., Hu, T., Zhang, H., Wu, H., Wang, S., Ma, L., & Long, M. (2023b). iTransFormer: Inverted transformers are effective for time series forecasting. arXiv preprint arXiv: 2310.06625."},{"key":"10.1016\/j.neunet.2026.108567_bib0101","unstructured":"Liu, Y., Ott, M., Goyal, N., Du, J., Joshi, M., Chen, D., Levy, O., Lewis, M., Zettlemoyer, L., & Stoyanov, V. (2019). RoBERTa: A robustly optimized BERT pretraining approach. 
arXiv preprint arXiv: 2409.09811."},{"key":"10.1016\/j.neunet.2026.108567_bib0102","unstructured":"Liu, Y., Sun, J., He, X., Pinney, G., Zhang, Z., & Schaeffer, H. (2024b). Prose-FD: A multimodal pde foundation model for learning multiple operators for forecasting fluid dynamics. arXiv preprint arXiv: 2409.09811."},{"key":"10.1016\/j.neunet.2026.108567_bib0103","doi-asserted-by":"crossref","DOI":"10.1016\/j.neunet.2024.106707","article-title":"Prose: Predicting multiple operators and symbolic expressions using multimodal transformers","volume":"180","author":"Liu","year":"2024","journal-title":"Neural Networks"},{"issue":"3","key":"10.1016\/j.neunet.2026.108567_bib0104","doi-asserted-by":"crossref","first-page":"218","DOI":"10.1038\/s42256-021-00302-5","article-title":"Learning nonlinear operators via deepONet based on the universal approximation theorem of operators","volume":"3","author":"Lu","year":"2021","journal-title":"Nature Machine Intelligence"},{"key":"10.1016\/j.neunet.2026.108567_bib0105","unstructured":"Lu, S., Bigoulaeva, I., Sachdeva, R., Madabushi, H. T., & Gurevych, I. (2023). 
Are emergent abilities in large language models just in-context learning?arXiv preprint arXiv: 2309.01809."},{"key":"10.1016\/j.neunet.2026.108567_bib0106","series-title":"Proceedings of the IEEE\/CVF international conference on computer vision","first-page":"9414","article-title":"Seasonal contrast: Unsupervised pre-training from uncurated remote sensing data","author":"Manas","year":"2021"},{"key":"10.1016\/j.neunet.2026.108567_bib0107","doi-asserted-by":"crossref","DOI":"10.1016\/j.cma.2019.112789","article-title":"Physics-informed neural networks for high-speed flows","volume":"360","author":"Mao","year":"2020","journal-title":"Computer Methods in Applied Mechanics and Engineering"},{"issue":"19","key":"10.1016\/j.neunet.2026.108567_bib0108","doi-asserted-by":"crossref","first-page":"2315","DOI":"10.1080\/00268976.2017.1333644","article-title":"Thirty years of density functional theory in computational chemistry: An overview and extensive assessment of 200 density functionals","volume":"115","author":"Mardirossian","year":"2017","journal-title":"Molecular Physics"},{"key":"10.1016\/j.neunet.2026.108567_bib0109","unstructured":"McCabe, M., Blancard, B. R.-S., Parker, L. H., Ohana, R., Cranmer, M., Bietti, A., Eickenberg, M., Golkar, S., Krawezik, G., Lanusse, F. et al. (2023). Multiple physics pretraining for physical surrogate models. arXiv preprint arXiv: 2310.02994."},{"key":"10.1016\/j.neunet.2026.108567_bib0110","first-page":"119301","article-title":"Multiple physics pretraining for spatiotemporal surrogate models","volume":"37","author":"McCabe","year":"2024","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0111","unstructured":"M\u00e9ndez-Lucio, O., Nicolaou, C., & Earnshaw, B. (2022). Mole: A molecular foundation model for drug discovery. 
arXiv preprint arXiv: 2211.02657."},{"key":"10.1016\/j.neunet.2026.108567_bib0112","unstructured":"Mendieta, M., Han, B., Shi, X., Zhu, Y., Chen, C., & Li, M. (2023). GFM: Building geospatial foundation models via continual pretraining. arXiv preprint arXiv: 2302.04476, 3."},{"key":"10.1016\/j.neunet.2026.108567_bib0113","doi-asserted-by":"crossref","DOI":"10.1016\/j.jcp.2019.109020","article-title":"A composite neural network that learns from multi-fidelity data: Application to function approximation and inverse PDE problems","volume":"401","author":"Meng","year":"2020","journal-title":"Journal of Computational Physics"},{"key":"10.1016\/j.neunet.2026.108567_bib0114","doi-asserted-by":"crossref","unstructured":"Menon, S. S., & Jagtap, A. D. (2025). Anant-Net: Breaking the curse of dimensionality with scalable and interpretable neural surrogate for high-dimensional pdes. Computer Methods in Applied Mechanics and Engineering, 447, 118403.","DOI":"10.1016\/j.cma.2025.118403"},{"key":"10.1016\/j.neunet.2026.108567_bib0115","doi-asserted-by":"crossref","DOI":"10.1038\/s41586-023-06735-9","article-title":"Scaling deep learning for materials discovery","author":"Merchant","year":"2023","journal-title":"Nature"},{"key":"10.1016\/j.neunet.2026.108567_bib0116","unstructured":"Mhaskar, H. N., Tsoukanis, E., & Jagtap, A. D. (2025). An approximation theory perspective on machine learning. 
arXiv preprint arXiv: 2506.02168."},{"issue":"12","key":"10.1016\/j.neunet.2026.108567_bib0117","doi-asserted-by":"crossref","first-page":"5509","DOI":"10.5194\/essd-13-5509-2021","article-title":"Instance\u2013the Italian seismic dataset for machine learning","volume":"13","author":"Michelini","year":"2021","journal-title":"Earth System Science Data"},{"key":"10.1016\/j.neunet.2026.108567_bib0118","doi-asserted-by":"crossref","DOI":"10.1016\/j.newton.2025.100016","article-title":"Multimodal foundation models for material property prediction and discovery","author":"Moro","year":"2025","journal-title":"Newton"},{"key":"10.1016\/j.neunet.2026.108567_bib0119","doi-asserted-by":"crossref","first-page":"179464","DOI":"10.1109\/ACCESS.2019.2947848","article-title":"Stanford earthquake dataset (stead): A global data set of seismic signals for AI","volume":"7","author":"Mousavi","year":"2019","journal-title":"IEEE Access"},{"key":"10.1016\/j.neunet.2026.108567_bib0120","unstructured":"Mukkavilli, S. K., Civitarese, D. S., Schmude, J., Jakubik, J., Jones, A., Nguyen, N., Phillips, C., Roy, S., Singh, S., Watson, C. et al. (2023). AI foundation models for weather and climate: Applications, design, and implementation. arXiv preprint arXiv: 2309.10808."},{"key":"10.1016\/j.neunet.2026.108567_bib0121","doi-asserted-by":"crossref","unstructured":"Negrini, E., Liu, Y., Yang, L., Osher, S. J., & Schaeffer, H. (2025). A multimodal PDE foundation model for prediction and scientific text descriptions. arXiv preprint arXiv: 2502.06026.","DOI":"10.4208\/jml.250214"},{"key":"10.1016\/j.neunet.2026.108567_bib0122","unstructured":"Nguyen, T., Brandstetter, J., Kapoor, A., Gupta, J. K., & Grover, A. (2023). Climax: A foundation model for weather and climate. arXiv preprint arXiv: 2301.10343."},{"key":"10.1016\/j.neunet.2026.108567_bib0123","unstructured":"Nguyen, T., Koneru, A., Li, S. et al. (2025). Physix: A foundation model for physics simulations. 
arXiv preprint arXiv: 2506.17774."},{"key":"10.1016\/j.neunet.2026.108567_bib0124","doi-asserted-by":"crossref","unstructured":"Ni, Y., Hutko, A., Skene, F., Denolle, M., Malone, S., Bodin, P., Hartog, R., & Wright, A. (2023). Curated pacific northwest AI-ready seismic dataset,.","DOI":"10.31223\/X53W9Q"},{"key":"10.1016\/j.neunet.2026.108567_bib0125","unstructured":"Nie, Y., Nguyen, N. H., Sinthong, P., & Kalagnanam, J. (2022). A time series is worth 64 words: Long-term forecasting with transformers. arXiv preprint arXiv: 2211.14730."},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0126","doi-asserted-by":"crossref","first-page":"485","DOI":"10.1093\/gji\/ggae049","article-title":"Obstransformer: A deep-learning seismic phase picker for obs data using automated labelling and transfer learning","volume":"237","author":"Niksejel","year":"2024","journal-title":"Geophysical Journal International"},{"key":"10.1016\/j.neunet.2026.108567_bib0127","unstructured":"Oikonomou, O., Lingsch, L., Grund, D., Mishra, S., & Kissas, G. (2025). Neuro-symbolic AI for analytical solutions of differential equations. arXiv preprint arXiv preprint arXiv: 2304.07193."},{"key":"10.1016\/j.neunet.2026.108567_bib0128","unstructured":"Oquab, M., Darcet, T., Moutakanni, T., Vo, H., Szafraniec, M., Khalidov, V., Fernandez, P., Haziza, D., Massa, F., El-Nouby, A. et al. (2023). Dinov2: Learning robust visual features without supervision. arXiv preprint arXiv: 2304.07193."},{"key":"10.1016\/j.neunet.2026.108567_bib0129","unstructured":"Oreshkin, B. N., Carpov, D., Chapados, N., & Bengio, Y. (2019). N-BEATS: Neural basis expansion analysis for interpretable time series forecasting. 
arXiv preprint arXiv: 1905.10437."},{"issue":"10","key":"10.1016\/j.neunet.2026.108567_bib0130","doi-asserted-by":"crossref","first-page":"1345","DOI":"10.1109\/TKDE.2009.191","article-title":"A survey on transfer learning","volume":"22","author":"Pan","year":"2009","journal-title":"IEEE Transactions on Knowledge and Data Engineering"},{"key":"10.1016\/j.neunet.2026.108567_bib0131","doi-asserted-by":"crossref","unstructured":"Park, D., Li, S., Huang, Y., Luo, X., Yu, H., Go, Y., Pinkenburg, C., Lin, Y., Yoo, S., Osborn, J. et al. (2025). Fm4npp: A scaling foundation model for nuclear and particle physics. arXiv preprint arXiv: 2508.14087.","DOI":"10.2139\/ssrn.5389206"},{"key":"10.1016\/j.neunet.2026.108567_bib0132","unstructured":"Park, K. V. (2025). Towards a foundation model for physics-informed neural networks: Multi-PDE learning with active sampling. arXiv preprint arXiv: 2502.07425."},{"key":"10.1016\/j.neunet.2026.108567_bib0133","unstructured":"Pathak, J., Subramanian, S., Harrington, P., Raja, S., Chattopadhyay, A., Mardani, M., Kurth, T., Hall, D., Li, Z., Azizzadenesheli, K. et al. (2022). FourcastNet: A global data-driven high-resolution weather model using adaptive fourier neural operators. arXiv preprint arXiv: 2202.11214."},{"key":"10.1016\/j.neunet.2026.108567_bib0134","doi-asserted-by":"crossref","DOI":"10.1016\/j.jcp.2023.112464","article-title":"A unified scalable framework for causal sweeping strategies for physics-informed neural networks (PINNs) and their temporal decompositions","volume":"493","author":"Penwarden","year":"2023","journal-title":"Journal of Computational Physics"},{"key":"10.1016\/j.neunet.2026.108567_bib0135","series-title":"Proceedings of the AAAI conference on artificial intelligence","article-title":"Film: Visual reasoning with a general conditioning layer","volume":"vol. 
32","author":"Perez","year":"2018"},{"key":"10.1016\/j.neunet.2026.108567_bib0136","doi-asserted-by":"crossref","DOI":"10.1016\/j.cma.2024.116996","article-title":"RiemannoNets: Interpretable neural operators for Riemann problems","volume":"426","author":"Peyvan","year":"2024","journal-title":"Computer Methods in Applied Mechanics and Engineering"},{"key":"10.1016\/j.neunet.2026.108567_bib0137","series-title":"AI for accelerated materials design-NeurIPS 2023 workshop","article-title":"MTEncoder: A multi-task pretrained transformer encoder for materials representation learning","author":"Prein","year":"2023"},{"key":"10.1016\/j.neunet.2026.108567_bib0138","series-title":"AI for accelerated materials design - ICLR 2025","article-title":"Dynamic fusion for a multimodal foundation model for materials","author":"Priyadarsini","year":"2025"},{"key":"10.1016\/j.neunet.2026.108567_bib0139","unstructured":"Priyadarsini, I., Takeda, S., Hamada, L., Brazil, E. V., Soares, E., & Shinohara, H. (2024). Self-bart : A transformer-based molecular representation model using selfies. https:\/\/arxiv.org\/abs\/2410.12348."},{"key":"10.1016\/j.neunet.2026.108567_bib0140","unstructured":"Priyadarsini, I., Takeda, S., Hamada, L., Brazil, E. V., Soares, E., & Shinohara, H. (2025b). SELFIES-TED : A robust transformer model for molecular representation using SELFIES. 
https:\/\/openreview.net\/forum?id=uPj9oBH80V."},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0141","doi-asserted-by":"crossref","first-page":"61","DOI":"10.1038\/s41524-025-01538-0","article-title":"Foundation models for materials discovery\u2013current state and future directions","volume":"11","author":"Pyzer-Knapp","year":"2025","journal-title":"NPJ Computational Materials"},{"issue":"3","key":"10.1016\/j.neunet.2026.108567_bib0142","first-page":"1610","article-title":"The GEOFON program in 2020","volume":"92","author":"Quinteros","year":"2021","journal-title":"Seismological Society of America"},{"key":"10.1016\/j.neunet.2026.108567_bib0143","series-title":"The United States Geological Survey, 1879\u20131989","volume":"vol. 1050","author":"Rabbitt","year":"1989"},{"issue":"8","key":"10.1016\/j.neunet.2026.108567_bib0144","first-page":"9","article-title":"Language models are unsupervised multitask learners","volume":"1","author":"Radford","year":"2019","journal-title":"OpenAI Blog"},{"key":"10.1016\/j.neunet.2026.108567_bib0145","doi-asserted-by":"crossref","first-page":"686","DOI":"10.1016\/j.jcp.2018.10.045","article-title":"Physics-informed neural networks: A deep learning framework for solving forward and inverse problems involving nonlinear partial differential equations","volume":"378","author":"Raissi","year":"2019","journal-title":"Journal of Computational Physics"},{"key":"10.1016\/j.neunet.2026.108567_bib0146","series-title":"ICLR 2023 workshop on physics for machine learning","article-title":"Convolutional neural operators","author":"Raonic","year":"2023"},{"issue":"6","key":"10.1016\/j.neunet.2026.108567_bib0147","doi-asserted-by":"crossref","DOI":"10.1029\/2023MS004019","article-title":"Weatherbench 2: A benchmark for the next generation of data-driven global weather models","volume":"16","author":"Rasp","year":"2024","journal-title":"Journal of Advances in Modeling Earth 
Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0148","unstructured":"Reed, S., Zolna, K., Parisotto, E., Colmenarejo, S. G., Novikov, A., Barth-Maron, G., Gimenez, M., Sulsky, Y., Kay, J., Springenberg, J. T. et al. (2022). A generalist agent. arXiv preprint arXiv: 2205.06175."},{"issue":"12","key":"10.1016\/j.neunet.2026.108567_bib0149","doi-asserted-by":"crossref","first-page":"1256","DOI":"10.1038\/s42256-022-00580-7","article-title":"Large-scale chemical language representations capture molecular structure and properties","volume":"4","author":"Ross","year":"2022","journal-title":"Nature Machine Intelligence"},{"key":"10.1016\/j.neunet.2026.108567_bib0150","doi-asserted-by":"crossref","first-page":"154","DOI":"10.1016\/j.rse.2014.02.001","article-title":"Landsat-8: Science and product vision for terrestrial global change research","volume":"145","author":"Roy","year":"2014","journal-title":"Remote Sensing of Environment"},{"key":"10.1016\/j.neunet.2026.108567_bib0151","unstructured":"Roy, S., Schmude, J., Lal, R., Gaur, V., Freitag, M., Kuehnert, J., van Kessel, T., Hegde, D. V., Mu\u00f1oz-Jaramillo, A., Jakubik, J. et al. (2025). Surya: Foundation model for heliophysics. arXiv preprint arXiv: 2508.14112."},{"key":"10.1016\/j.neunet.2026.108567_bib0152","first-page":"55565","article-title":"Are emergent abilities of large language models a mirage?","volume":"36","author":"Schaeffer","year":"2023","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0153","unstructured":"Schmude, J., Roy, S., Trojak, W., Jakubik, J., Civitarese, D. S., Singh, S., Kuehnert, J., Ankur, K., Gupta, A., Phillips, C. E. et al. (2024). Prithvi WXC: Foundation model for weather and climate. arXiv preprint arXiv: 2403.07187."},{"key":"10.1016\/j.neunet.2026.108567_bib0154","unstructured":"Shen, J., Marwah, T., & Talwalkar, A. (2024). Ups: Efficiently building foundation models for pde solving via cross-modal adaptation. 
arXiv preprint arXiv: 2403.07187."},{"key":"10.1016\/j.neunet.2026.108567_bib0155","series-title":"Proceedings of the IEEE\/CVF winter conference on applications of computer vision","first-page":"3531","article-title":"Efficient attention: Attention with linear complexities","author":"Shen","year":"2021"},{"key":"10.1016\/j.neunet.2026.108567_bib0156","unstructured":"Sheng, H., Wu, X., Si, X., Li, J., Zhang, S., & Duan, X. (2023). Seismic foundation model (SFM): A new generation deep learning model in geophysics. arxiv."},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0157","doi-asserted-by":"crossref","first-page":"68","DOI":"10.1109\/MSP.2021.3118904","article-title":"A physics-informed neural network for quantifying the microstructural properties of polycrystalline nickel using ultrasound data: A promising approach for solving inverse problems","volume":"39","author":"Shukla","year":"2021","journal-title":"IEEE Signal Processing Magazine"},{"key":"10.1016\/j.neunet.2026.108567_bib0158","doi-asserted-by":"crossref","DOI":"10.1016\/j.jcp.2021.110683","article-title":"Parallel physics-informed neural networks via domain decomposition","volume":"447","author":"Shukla","year":"2021","journal-title":"Journal of Computational Physics"},{"key":"10.1016\/j.neunet.2026.108567_bib0159","first-page":"1","article-title":"SeisCLIP: A seismology foundation model pre-trained by multimodal data for multipurpose seismic feature extraction","volume":"62","author":"Si","year":"2024","journal-title":"IEEE Transactions on Geoscience and Remote Sensing"},{"key":"10.1016\/j.neunet.2026.108567_bib0160","unstructured":"Soares, E., Brazil, E. V., Shirasuna, V. Y., Zubarev, D., Cerqueira, R., & Schmidt, K. (2024a). SMI-TED: A large-scale foundation model for materials and Chemistry. 
arXiv preprint arXiv: 2003.12140."},{"key":"10.1016\/j.neunet.2026.108567_bib0161","series-title":"AI for accelerated materials design - neurips 2024","article-title":"Multi-view mixture-of-experts for predicting molecular properties using SMILES, SELFIES, and graph-based representations","author":"Soares","year":"2024"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0162","doi-asserted-by":"crossref","first-page":"8","DOI":"10.1038\/s44387-025-00009-7","article-title":"A mamba-based foundation model for materials","volume":"1","author":"Soares","year":"2025","journal-title":"NPJ Artificial Intelligence"},{"key":"10.1016\/j.neunet.2026.108567_bib0163","unstructured":"S\u00f8nderby, C. K., Espeholt, L., Heek, J., Dehghani, M., Oliver, A., Salimans, T., Agrawal, S., Hickey, J., & Kalchbrenner, N. (2020). MetNet: A neural weather model for precipitation forecasting. arXiv preprint arXiv: 2003.12140."},{"key":"10.1016\/j.neunet.2026.108567_bib0164","doi-asserted-by":"crossref","DOI":"10.1109\/TMI.2025.3567247","article-title":"Dino-REG: Efficient multimodal image registration with distilled features","author":"Song","year":"2025","journal-title":"IEEE Transactions on Medical Imaging"},{"key":"10.1016\/j.neunet.2026.108567_bib0165","first-page":"1","article-title":"Global anthropogenic emissions (CAMS-GLOB-ANT) for the copernicus atmosphere monitoring service simulations of air quality forecasts and reanalyses","volume":"2023","author":"Soulie","year":"2023","journal-title":"Earth System Science Data Discussions"},{"issue":"18","key":"10.1016\/j.neunet.2026.108567_bib0166","doi-asserted-by":"crossref","first-page":"5882","DOI":"10.1021\/acs.jpcb.5b00689","article-title":"The general AMBER force field (GAFF) can accurately predict thermodynamic and transport properties of many ionic liquids","volume":"119","author":"Sprenger","year":"2015","journal-title":"The Journal of Physical Chemistry 
B"},{"key":"10.1016\/j.neunet.2026.108567_bib0167","first-page":"71242","article-title":"Towards foundation models for scientific machine learning: Characterizing scaling and transfer behavior","volume":"36","author":"Subramanian","year":"2023","journal-title":"Advances in Neural Information Processing Systems"},{"issue":"3","key":"10.1016\/j.neunet.2026.108567_bib0168","doi-asserted-by":"crossref","DOI":"10.1103\/PhysRevE.111.035304","article-title":"Towards a foundation model for partial differential equations: Multioperator learning and extrapolation","volume":"111","author":"Sun","year":"2025","journal-title":"Physical Review E"},{"key":"10.1016\/j.neunet.2026.108567_bib0169","unstructured":"Sun, J., Zhang, Z., & Schaeffer, H. (2024). Lemon: Learning to learn multi-operator networks. arXiv preprint arXiv: 2408.16168."},{"key":"10.1016\/j.neunet.2026.108567_bib0170","article-title":"Sequence to sequence learning with neural networks","volume":"27","author":"Sutskever","year":"2014","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0171","unstructured":"Szwarcman, D., Roy, S., Fraccaro, P., G\u00edslason, \u00de. E., Blumenstiel, B., Ghosal, R., de Oliveira, P. H., de Sousa, A. J. L., Sedona, R., Kang, Y. et al. (2024). Prithvi-eo-2.0: A versatile multi-temporal foundation model for earth observation applications. arXiv preprint arXiv: 2412.02732."},{"key":"10.1016\/j.neunet.2026.108567_bib0172","first-page":"1596","article-title":"PDEBench: An extensive benchmark for scientific machine learning","volume":"35","author":"Takamoto","year":"2022","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0173","series-title":"Proceedings of the AAAI conference on artificial intelligence","first-page":"15376","article-title":"Foundation model for material science","volume":"vol. 
37","author":"Takeda","year":"2023"},{"key":"10.1016\/j.neunet.2026.108567_bib0174","series-title":"AI for accelerated materials design-NeurIPS 2023 workshop","article-title":"Multi-modal foundation model for material design","author":"Takeda","year":"2023"},{"key":"10.1016\/j.neunet.2026.108567_bib0175","series-title":"International conference on machine learning","first-page":"6105","article-title":"Efficientnet: Rethinking model scaling for convolutional neural networks","author":"Tan","year":"2019"},{"key":"10.1016\/j.neunet.2026.108567_bib0176","unstructured":"Touvron, H., Lavril, T., Izacard, G., Martinet, X., Lachaux, M.-A., Lacroix, T., Rozi\u00e8re, B., Goyal, N., Hambro, E., Azhar, F. et al. (2023). Llama: Open and efficient foundation language models. arXiv preprint arXiv: 2302.13971."},{"key":"10.1016\/j.neunet.2026.108567_bib0177","unstructured":"Tschannen, M., Gritsenko, A., Wang, X., Naeem, M. F., Alabdulmohsin, I., Parthasarathy, N., Evans, T., Beyer, L., Xia, Y., Mustafa, B. et al. (2025). Siglip 2: Multilingual vision-language encoders with improved semantic understanding, localization, and dense features. arXiv preprint arXiv: 2502.14786."},{"key":"10.1016\/j.neunet.2026.108567_bib0178","unstructured":"Tseng, G., Cartuyvels, R., Zvonkov, I., Purohit, M., Rolnick, D., & Kerner, H. (2023). Lightweight, pre-trained transformers for remote sensing timeseries. 
arXiv preprint arXiv: 2304.14065."},{"key":"10.1016\/j.neunet.2026.108567_bib0179","doi-asserted-by":"crossref","first-page":"232","DOI":"10.1016\/j.isprsjprs.2022.01.021","article-title":"Can we detect more ephemeral floods with higher density harmonized landsat sentinel 2 data compared to landsat 8 alone?","volume":"185","author":"Tulbure","year":"2022","journal-title":"ISPRS Journal of Photogrammetry and Remote Sensing"},{"key":"10.1016\/j.neunet.2026.108567_bib0180","article-title":"Attention is all you need","volume":"30","author":"Vaswani","year":"2017","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0181","first-page":"22009","article-title":"Sevir: A storm event imagery dataset for deep learning applications in radar and satellite meteorology","volume":"33","author":"Veillette","year":"2020","journal-title":"Advances in Neural Information Processing Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0182","doi-asserted-by":"crossref","unstructured":"Wang, X., Choi, J.-Y., Kurihaya, T., Lyngaas, I., Yoon, H.-J., Fan, M., Nafi, N. M., Tsaris, A., Aji, A. M., Hossain, M. et al. (2025). Orbit-2: Scaling exascale vision foundation models for weather and climate downscaling. arXiv preprint arXiv: 2505.04802.","DOI":"10.1145\/3712285.3771989"},{"issue":"3","key":"10.1016\/j.neunet.2026.108567_bib0183","doi-asserted-by":"crossref","first-page":"98","DOI":"10.1109\/MGRS.2023.3281651","article-title":"Ssl4eo-s12: A large-scale multimodal, multitemporal dataset for self-supervised learning in earth observation [Software and data sets]","volume":"11","author":"Wang","year":"2023","journal-title":"IEEE Geoscience and Remote Sensing Magazine"},{"issue":"10","key":"10.1016\/j.neunet.2026.108567_bib0184","doi-asserted-by":"crossref","DOI":"10.1029\/2021MS002954","article-title":"Climatebench v1. 
0: A benchmark for data-driven climate projections","volume":"14","author":"Watson-Parris","year":"2022","journal-title":"Journal of Advances in Modeling Earth Systems"},{"key":"10.1016\/j.neunet.2026.108567_bib0185","unstructured":"Wei, J., Tay, Y., Bommasani, R., Raffel, C., Zoph, B., Borgeaud, S., Yogatama, D., Bosma, M., Zhou, D., Metzler, D. et al. (2022). Emergent abilities of large language models. arXiv preprint arXiv: 2206.07682."},{"issue":"12","key":"10.1016\/j.neunet.2026.108567_bib0186","doi-asserted-by":"crossref","DOI":"10.1007\/s11704-025-50480-3","article-title":"Federated reasoning LLMs: A survey","volume":"19","author":"Wei","year":"2025","journal-title":"Frontiers of Computer Science"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0187","doi-asserted-by":"crossref","first-page":"31","DOI":"10.1021\/ci00057a005","article-title":"Smiles, a chemical language and information system. 1. Introduction to methodology and encoding rules","volume":"28","author":"Weininger","year":"1988","journal-title":"Journal of Chemical Information and Computer Sciences"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0188","doi-asserted-by":"crossref","first-page":"67","DOI":"10.1109\/4235.585893","article-title":"No free lunch theorems for optimization","volume":"1","author":"Wolpert","year":"1997","journal-title":"IEEE Transactions on Evolutionary Computation"},{"key":"10.1016\/j.neunet.2026.108567_bib0189","unstructured":"Woo, G., Liu, C., Kumar, A., Xiong, C., Savarese, S., & Sahoo, D. (2024). 
Unified training of universal time series forecasting transformers,."},{"issue":"2A","key":"10.1016\/j.neunet.2026.108567_bib0190","doi-asserted-by":"crossref","first-page":"491","DOI":"10.1785\/0220180312","article-title":"Convolutional neural network for seismic phase classification, performance demonstration over a local seismic network","volume":"90","author":"Woollam","year":"2019","journal-title":"Seismological Research Letters"},{"key":"10.1016\/j.neunet.2026.108567_bib0191","unstructured":"Wu, H., Hu, T., Liu, Y., Zhou, H., Wang, J., & Long, M. (2022). TimesNet: Temporal 2d-variation modeling for general time series analysis. arXiv preprint arXiv: 2210.02186."},{"issue":"abs\/1703.00564","key":"10.1016\/j.neunet.2026.108567_bib0192","article-title":"MoleculeNet: A benchmark for molecular machine learning","author":"Wu","year":"2017","journal-title":"CoRR"},{"key":"10.1016\/j.neunet.2026.108567_bib0193","unstructured":"Xiong, Z., Wang, Y., Zhang, F., Stewart, A. J., Hanna, J., Borth, D., Papoutsis, I., Saux, B. L., Camps-Valls, G., & Zhu, X. X. (2024). Neural plasticity-inspired multimodal foundation model for earth observation. 
arXiv preprint arXiv: 2403.15356."},{"issue":"10","key":"10.1016\/j.neunet.2026.108567_bib0194","doi-asserted-by":"crossref","first-page":"852","DOI":"10.1038\/s42256-022-00534-z","article-title":"scBERT as a large-scale pretrained deep language model for cell type annotation of single-cell RNA-seq data","volume":"4","author":"Yang","year":"2022","journal-title":"Nature Machine Intelligence"},{"issue":"39","key":"10.1016\/j.neunet.2026.108567_bib0195","doi-asserted-by":"crossref","DOI":"10.1073\/pnas.2310142120","article-title":"In-context operator learning with data prompts for differential equation problems","volume":"120","author":"Yang","year":"2023","journal-title":"Proceedings of the National Academy of Sciences"},{"key":"10.1016\/j.neunet.2026.108567_bib0196","doi-asserted-by":"crossref","DOI":"10.1016\/j.jcp.2024.113379","article-title":"Pde generalization of in-context operator networks: A study on 1D scalar nonlinear conservation laws","volume":"519","author":"Yang","year":"2024","journal-title":"Journal of Computational Physics"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0197","doi-asserted-by":"crossref","first-page":"83","DOI":"10.26599\/CVM.2025.9450383","article-title":"Swin3D: A pretrained transformer backbone for 3D indoor scene understanding","volume":"11","author":"Yang","year":"2025","journal-title":"Computational Visual Media"},{"key":"10.1016\/j.neunet.2026.108567_bib0198","doi-asserted-by":"crossref","unstructured":"Yuan, J., Gao, H., Dai, D., Luo, J., Zhao, L., Zhang, Z., Xie, Z., Wei, Y. X., Wang, L., Xiao, Z. et al. (2025). Native sparse attention: Hardware-aligned and natively trainable sparse attention. 
arXiv preprint arXiv preprint arXiv: 2411.05420.","DOI":"10.18653\/v1\/2025.acl-long.1126"},{"issue":"D1","key":"10.1016\/j.neunet.2026.108567_bib0199","doi-asserted-by":"crossref","first-page":"D1180","DOI":"10.1093\/nar\/gkad1004","article-title":"The chEMBL database in 2023: A drug discovery platform spanning multiple bioactivity data types and time periods","volume":"52","author":"Zdrazil","year":"2023","journal-title":"Nucleic Acids Research"},{"key":"10.1016\/j.neunet.2026.108567_bib0200","doi-asserted-by":"crossref","DOI":"10.1038\/s41586-025-08628-5","article-title":"A generative model for inorganic materials design","author":"Zeni","year":"2025","journal-title":"Nature"},{"key":"10.1016\/j.neunet.2026.108567_bib0201","unstructured":"Zhang, T., Kishore, V., Wu, F., Weinberger, K. Q., & Artzi, Y. (2019). BertScore: Evaluating text generation with BERT. arXiv preprint arXiv: 1904.09675,."},{"key":"10.1016\/j.neunet.2026.108567_bib0202","unstructured":"Zhang, Y., & Gilpin, W. (2024). Zero-shot forecasting of chaotic systems. arXiv preprint arXiv: 2409.15771,."},{"key":"10.1016\/j.neunet.2026.108567_bib0203","doi-asserted-by":"crossref","unstructured":"Zhang, Y., Menon, S. S., Cheng, L., Gnanaskandan, A., & Jagtap, A. D. (2026b). BubbleOKAN: A physics-informed interpretable neural operator for high-frequency bubble dynamics. Computer Methods in Applied Mechanics and Engineering, 450, 118667.","DOI":"10.1016\/j.cma.2025.118667"},{"key":"10.1016\/j.neunet.2026.108567_bib0204","unstructured":"Zhao, X., Zhou, Z., Zhang, W., Liu, Y., Chen, X., Gong, J., Chen, H., Fei, B., Chen, S., Ouyang, W. et al. (2024). WeatherGFM: Learning a weather generalist foundation model via in-context learning. 
arXiv preprint arXiv: 2411.05420,."},{"key":"10.1016\/j.neunet.2026.108567_bib0205","series-title":"Proceedings of the AAAI conference on artificial intelligence","first-page":"11106","article-title":"Informer: Beyond efficient transformer for long sequence time-series forecasting","volume":"vol. 35","author":"Zhou","year":"2021"},{"issue":"3","key":"10.1016\/j.neunet.2026.108567_bib0206","doi-asserted-by":"crossref","first-page":"328","DOI":"10.3390\/rs11030328","article-title":"Monitoring landscape dynamics in central US grasslands with harmonized landsat-8 and sentinel-2 time series data","volume":"11","author":"Zhou","year":"2019","journal-title":"Remote Sensing"},{"key":"10.1016\/j.neunet.2026.108567_bib0207","first-page":"43322","article-title":"One fits all: Power general time series analysis by pretrained LM","volume":"36","author":"Zhou","year":"2023","journal-title":"Advances in Neural Information Processing Systems"},{"issue":"1","key":"10.1016\/j.neunet.2026.108567_bib0208","doi-asserted-by":"crossref","first-page":"43","DOI":"10.1109\/JPROC.2020.3004555","article-title":"A comprehensive survey on transfer learning","volume":"109","author":"Zhuang","year":"2020","journal-title":"Proceedings of the IEEE"},{"key":"10.1016\/j.neunet.2026.108567_bib0209","unstructured":"Ziegler, M., Posada-Moreno, A. F., Solowjow, F., & Trimpe, S. (2024). On foundation models for dynamical systems from purely synthetic data. arXiv preprint arXiv: 2412.00395."},{"key":"10.1016\/j.neunet.2026.108567_bib0210","unstructured":"Zurich, S. S. S. A. E. (1983). National seismic networks of Switzerland. 
http:\/\/networks.seismo.ethz.ch\/networks\/ch\/."}],"container-title":["Neural Networks"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0893608026000304?httpAccept=text\/xml","content-type":"text\/xml","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0893608026000304?httpAccept=text\/plain","content-type":"text\/plain","content-version":"vor","intended-application":"text-mining"}],"deposited":{"date-parts":[[2026,4,6]],"date-time":"2026-04-06T18:40:53Z","timestamp":1775500853000},"score":1,"resource":{"primary":{"URL":"https:\/\/linkinghub.elsevier.com\/retrieve\/pii\/S0893608026000304"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2026,6]]},"references-count":210,"alternative-id":["S0893608026000304"],"URL":"https:\/\/doi.org\/10.1016\/j.neunet.2026.108567","relation":{},"ISSN":["0893-6080"],"issn-type":[{"value":"0893-6080","type":"print"}],"subject":[],"published":{"date-parts":[[2026,6]]},"assertion":[{"value":"Elsevier","name":"publisher","label":"This article is maintained by"},{"value":"On scientific foundation models: Rigorous definitions, key applications, and a comprehensive survey","name":"articletitle","label":"Article Title"},{"value":"Neural Networks","name":"journaltitle","label":"Journal Title"},{"value":"https:\/\/doi.org\/10.1016\/j.neunet.2026.108567","name":"articlelink","label":"CrossRef DOI link to publisher maintained version"},{"value":"article","name":"content_type","label":"Content Type"},{"value":"\u00a9 2026 The Author(s). Published by Elsevier Ltd.","name":"copyright","label":"Copyright"}],"article-number":"108567"}}