{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,6,24]],"date-time":"2025-06-24T07:10:09Z","timestamp":1750749009777,"version":"3.41.0"},"reference-count":19,"publisher":"IEEE","license":[{"start":{"date-parts":[[2025,5,5]],"date-time":"2025-05-05T00:00:00Z","timestamp":1746403200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,5,5]],"date-time":"2025-05-05T00:00:00Z","timestamp":1746403200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025,5,5]]},"DOI":"10.1109\/cscwd64889.2025.11033534","type":"proceedings-article","created":{"date-parts":[[2025,6,23]],"date-time":"2025-06-23T17:24:40Z","timestamp":1750699480000},"page":"379-384","source":"Crossref","is-referenced-by-count":0,"title":["FFDFormer: A Fourier Decomposed Transformer with Inter-Intra Variable Fourier Dependencies for Multivariate Time Series Forecasting"],"prefix":"10.1109","author":[{"given":"Zhen","family":"Dong","sequence":"first","affiliation":[{"name":"Tianjin University of Technology,Tianjin Key Laboratory of Intelligence Computing and Novel Software Technology,Tianjin,China"}]},{"given":"Qing","family":"Yu","sequence":"additional","affiliation":[{"name":"Tianjin University of Technology,Tianjin Key Laboratory of Intelligence Computing and Novel Software Technology,Tianjin,China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1016\/j.ijforecast.2020.06.008"},{"key":"ref2","article-title":"Segrnn: Segment recurrent neural network for long-term time series forecasting","author":"Lin","year":"2023","journal-title":"arXiv preprint arXiv"},{"key":"ref3","article-title":"Timesnet: Temporal 2d-variation modeling for general time series analysis","author":"Wu","year":"2022","journal-title":"arXiv preprint arXiv"},{"key":"ref4","article-title":"Micn: Multi-scale local and global context modeling for long-term series forecasting","volume-title":"The eleventh international conference on learning representations","author":"Wang","year":"2023"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v37i9.26317"},{"key":"ref6","article-title":"A time series is worth 64 words: Long-term forecasting with transformers","author":"Nie","year":"2022","journal-title":"arXiv preprint arXiv"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i12.17325"},{"key":"ref8","article-title":"Crossformer: Transformer utilizing cross-dimension dependency for multivariate time series forecasting","volume-title":"The Eleventh International Conference on Learning Representations","author":"Zhang","year":"2022"},{"key":"ref9","article-title":"Deep transformer models for time series forecasting: The influenza prevalence case","author":"Wu","year":"2020","journal-title":"arXiv preprint arXiv"},{"key":"ref10","article-title":"N-beats: Neural basis expansion analysis for interpretable time series forecasting","author":"Oreshkin","year":"2019","journal-title":"arXiv preprint arXiv"},{"key":"ref11","first-page":"22419","article-title":"Autoformer: De-composition transformers with auto-correlation for long-term series forecasting","volume":"34","author":"Wu","year":"2021","journal-title":"Advances in neural information processing systems"},{"key":"ref12","first-page":"27268","article-title":"Fedformer: Frequency enhanced decomposed transformer for long-term series forecasting","volume-title":"International conference on machine learning","author":"Zhou","year":"2022"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2022.naacl-main.319"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1016\/j.sigpro.2020.107852"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-319-71249-9_47"},{"key":"ref16","article-title":"Rethinking attention with performers","author":"Choromanski","year":"2020","journal-title":"arXiv preprint arXiv"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1090\/S0025-5718-1965-0178586-1"},{"key":"ref18","article-title":"itransformer: Inverted transformers are effective for time series forecasting","volume-title":"arXiv preprint arXiv","author":"Liu","year":"2023"},{"key":"ref19","article-title":"Pyraformer: Low-complexity pyramidal attention for long-range time series modeling and forecasting","volume-title":"International conference on learning representations","author":"Liu","year":"2021"}],"event":{"name":"2025 28th International Conference on Computer Supported Cooperative Work in Design (CSCWD)","start":{"date-parts":[[2025,5,5]]},"location":"Compiegne, France","end":{"date-parts":[[2025,5,7]]}},"container-title":["2025 28th International Conference on Computer Supported Cooperative Work in Design (CSCWD)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/11033175\/11033221\/11033534.pdf?arnumber=11033534","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,24]],"date-time":"2025-06-24T06:35:46Z","timestamp":1750746946000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11033534\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,5,5]]},"references-count":19,"URL":"https:\/\/doi.org\/10.1109\/cscwd64889.2025.11033534","relation":{},"subject":[],"published":{"date-parts":[[2025,5,5]]}}}