{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,11,15]],"date-time":"2025-11-15T07:28:39Z","timestamp":1763191719327,"version":"3.45.0"},"reference-count":47,"publisher":"IEEE","license":[{"start":{"date-parts":[[2025,6,30]],"date-time":"2025-06-30T00:00:00Z","timestamp":1751241600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,6,30]],"date-time":"2025-06-30T00:00:00Z","timestamp":1751241600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100012166","name":"National Key Research and Development Program of China","doi-asserted-by":"publisher","id":[{"id":"10.13039\/501100012166","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025,6,30]]},"DOI":"10.1109\/ijcnn64981.2025.11228414","type":"proceedings-article","created":{"date-parts":[[2025,11,14]],"date-time":"2025-11-14T18:46:15Z","timestamp":1763145975000},"page":"1-8","source":"Crossref","is-referenced-by-count":0,"title":["RecLGB: Enhancing LightGBM using Recursive VAE with Mixed Attention for Time-Series Forecasting"],"prefix":"10.1109","author":[{"given":"Yuxin","family":"Mei","sequence":"first","affiliation":[{"name":"East China Normal University,Shanghai Key Laboratory of Trustworthy Computing,Shanghai,China"}]},{"given":"Xu","family":"Han","sequence":"additional","affiliation":[{"name":"East China Normal University,Shanghai Key Laboratory of Trustworthy Computing,Shanghai,China"}]},{"given":"Zhongming","family":"Han","sequence":"additional","affiliation":[{"name":"Beijing Technology and Business University,Beijing,China"}]},{"given":"Li","family":"Han","sequence":"additional","affiliation":[{"name":"East China Normal University,Shanghai Key Laboratory of Trustworthy Computing,Shanghai,China"}]},{"given":"Jing","family":"Liu","sequence":"additional","affiliation":[{"name":"East China Normal University,Shanghai Key Laboratory of Trustworthy Computing,Shanghai,China"}]}],"member":"263","reference":[{"article-title":"An empirical evaluation of generic convolutional and recurrent networks for sequence modeling","year":"2018","author":"Bai","key":"ref1"},{"article-title":"Neural machine translation by jointly learning to align and translate","year":"2014","author":"Bahdanau","key":"ref2"},{"key":"ref3","first-page":"30518","article-title":"SutraNets: Sub-series autoregressive networks for long-sequence, probabilistic forecasting","volume":"36","author":"Bergsma","year":"2023","journal-title":"Adv. Neural Inf. Process. Syst"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1057\/9781137291264_6"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.3390\/s24051522"},{"article-title":"Pathformer: Multi-scale transformers with adaptive pathways for time series forecasting","year":"2024","author":"Chen","key":"ref6"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v34i04.5766"},{"article-title":"Generating long sequences with sparse transformers","year":"2019","author":"Child","key":"ref8"},{"article-title":"Gated feedback recurrent neural networks","volume-title":"Proc. Int. Conf. Mach. Learn. (ICML)","author":"Chung","key":"ref9"},{"article-title":"Long-term forecasting with TIDE: Time-series dense encoder","year":"2023","author":"Das","key":"ref10"},{"article-title":"BERT: Pre-training of deep bidirectional transformers for language understanding","year":"2018","author":"Devlin","key":"ref11"},{"article-title":"An image is worth 16x16 words: Transformers for image recognition at scale","year":"2020","author":"Dosovitskiy","key":"ref12"},{"article-title":"Do we really need deep learning models for time series forecasting?","year":"2021","author":"Elsayed","key":"ref13"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1126\/science.1127647"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1142\/S0218488598000094"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1145\/3357384.3358132"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1145\/3637528.3671969"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1002\/smr.2430"},{"key":"ref19","article-title":"LightGBM: A highly efficient gradient boosting decision tree","volume":"30","author":"Ke","year":"2017","journal-title":"Adv. Neural Inf. Process. Syst. (NeurIPS)"},{"article-title":"Reversible instance normalization for accurate time-series forecasting against distribution shift","volume-title":"Proc. Int. Conf. Learn. Represent. (ICLR)","author":"Kim","key":"ref20"},{"article-title":"Auto-encoding variational Bayes","year":"2013","author":"Kingma","key":"ref21"},{"article-title":"Reformer: The efficient transformer","year":"2020","author":"Kitaev","key":"ref22"},{"key":"ref23","first-page":"15371","article-title":"Deep Rao-Blackwellised particle filters for time series forecasting","volume":"33","author":"Kurle","year":"2020","journal-title":"Adv. Neural Inf. Process. Syst. (NeurIPS)"},{"key":"ref24","article-title":"Enhancing the locality and breaking the memory bottleneck of transformer on time series forecasting","volume":"32","author":"Li","year":"2019","journal-title":"Adv. Neural Inf. Process. Syst. (NeurIPS)"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i5.16542"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2018\/476"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.1016\/j.ijforecast.2021.03.012"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2021.3134792"},{"key":"ref29","first-page":"5816","article-title":"SCINet: Time series modeling and forecasting with sample convolution and interaction","volume":"35","author":"Liu","year":"2022","journal-title":"Adv. Neural Inf. Process. Syst. (NeurIPS)"},{"article-title":"Moderntcn: A modern pure convolution structure for general time series analysis","volume-title":"Proc. Int. Conf. Learn. Represent. (ICLR)","author":"Luo","key":"ref30"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D15-1166"},{"article-title":"Efficient and effective time-series forecasting with spiking neural networks","year":"2024","author":"Lv","key":"ref32"},{"article-title":"MEGA: Moving average equipped gated attention","year":"2022","author":"Ma","key":"ref33"},{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2017\/366"},{"issue":"8","key":"ref35","first-page":"9","article-title":"Language models are unsupervised multitask learners","volume":"1","author":"Radford","year":"2019","journal-title":"OpenAI Blog"},{"key":"ref36","article-title":"Efficient temporal processing with spiking neural networks","author":"Salaj","year":"2021","journal-title":"Adv. Neural Inf. Process. Syst. (NeurIPS)"},{"key":"ref37","doi-asserted-by":"publisher","DOI":"10.1016\/j.knosys.2023.110666"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1007\/s10994-019-05815-0"},{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v32i1.11635"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1016\/j.neucom.2006.06.015"},{"key":"ref41","article-title":"Attention is all you need","author":"Vaswani","year":"2017","journal-title":"Adv. Neural Inf. Process. Syst. (NeurIPS)"},{"article-title":"Micn: Multi-scale local and global context modeling for long-term series forecasting","volume-title":"Proc. 11th Int. Conf. Learn. Representations (ICLR)","author":"Wang","key":"ref42"},{"key":"ref43","first-page":"22419","article-title":"Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting","volume":"34","author":"Wu","year":"2021","journal-title":"Adv. Neural Inf. Process. Syst. (NeurIPS)"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v36i8.20881"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v37i9.26317"},{"article-title":"Crossformer: Transformer utilizing cross-dimension dependency for multivariate time series forecasting","volume-title":"Proc. 11th Int. Conf. Learn. Represent. (ICLR)","author":"Zhang","key":"ref46"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i12.17325"}],"event":{"name":"2025 International Joint Conference on Neural Networks (IJCNN)","start":{"date-parts":[[2025,6,30]]},"location":"Rome, Italy","end":{"date-parts":[[2025,7,5]]}},"container-title":["2025 International Joint Conference on Neural Networks (IJCNN)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/11227166\/11227148\/11228414.pdf?arnumber=11228414","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,11,15]],"date-time":"2025-11-15T07:27:02Z","timestamp":1763191622000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11228414\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,6,30]]},"references-count":47,"URL":"https:\/\/doi.org\/10.1109\/ijcnn64981.2025.11228414","relation":{},"subject":[],"published":{"date-parts":[[2025,6,30]]}}}