{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,11,5]],"date-time":"2025-11-05T11:33:55Z","timestamp":1762342435566,"version":"3.32.0"},"reference-count":8,"publisher":"IEEE","license":[{"start":{"date-parts":[[2024,9,20]],"date-time":"2024-09-20T00:00:00Z","timestamp":1726790400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,9,20]],"date-time":"2024-09-20T00:00:00Z","timestamp":1726790400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024,9,20]]},"DOI":"10.1109\/icbase63199.2024.10762467","type":"proceedings-article","created":{"date-parts":[[2024,11,26]],"date-time":"2024-11-26T18:45:22Z","timestamp":1732646722000},"page":"816-820","source":"Crossref","is-referenced-by-count":1,"title":["A Long Time Series Prediction Model Based on CNN and Linear Combination"],"prefix":"10.1109","author":[{"given":"Wuye","family":"Lv","sequence":"first","affiliation":[{"name":"Shanghai Maritime University,College of Information Engineering,Shanghai,China"}]},{"given":"Xiaoxia","family":"Huang","sequence":"additional","affiliation":[{"name":"Shanghai Maritime University,College of Information Engineering,Shanghai,China"}]}],"member":"263","reference":[{"key":"ref1","article-title":"Attention is all you need","author":"Vaswani","year":"2017","journal-title":"Neural Information Processing Systems"},{"key":"ref2","article-title":"Informer: Beyond efficient transformer for long sequence time-series forecasting","volume":"abs\/2012.07436","author":"Zhou","year":"2020","journal-title":"ArXiv"},{"key":"ref3","article-title":"A time series is worth 64 words: Long-term forecasting with transformers","volume":"abs\/2211.14730","author":"Nie","year":"2022","journal-title":"ArXiv"},{"key":"ref4","doi-asserted-by":"crossref","DOI":"10.1609\/aaai.v37i9.26317","article-title":"Are transformers effective for time series forecasting?","volume-title":"AAAI Conference on Artificial Intelligence","author":"Zeng"},{"key":"ref5","article-title":"An empirical evaluation of generic convolutional and recurrent networks for sequence modeling","volume":"abs\/1803.01271","author":"Bai","year":"2018","journal-title":"ArXiv"},{"key":"ref6","article-title":"Patchmixer: A patch-mixing architecture for long-term time series forecasting","volume":"abs\/2310.00655","author":"Gong","year":"2023","journal-title":"ArXiv"},{"key":"ref7","article-title":"Demt: Deformable mixer transformer for multitask learning of dense prediction","volume":"abs\/2301.03461","author":"Yang","year":"2023","journal-title":"ArXiv"},{"article-title":"Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting","volume-title":"Neural Information Processing Systems","author":"Wu","key":"ref8"}],"event":{"name":"2024 5th International Conference on Big Data &amp; Artificial Intelligence &amp; Software Engineering (ICBASE)","start":{"date-parts":[[2024,9,20]]},"location":"Wenzhou, China","end":{"date-parts":[[2024,9,22]]}},"container-title":["2024 5th International Conference on Big Data &amp;amp; Artificial Intelligence &amp;amp; Software Engineering 
(ICBASE)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/10761118\/10761986\/10762467.pdf?arnumber=10762467","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,1,10]],"date-time":"2025-01-10T19:50:25Z","timestamp":1736538625000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10762467\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,9,20]]},"references-count":8,"URL":"https:\/\/doi.org\/10.1109\/icbase63199.2024.10762467","relation":{},"subject":[],"published":{"date-parts":[[2024,9,20]]}}}