{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,1,14]],"date-time":"2026-01-14T17:40:41Z","timestamp":1768412441471,"version":"3.49.0"},"publisher-location":"New York, NY, USA","reference-count":41,"publisher":"ACM","license":[{"start":{"date-parts":[[2022,10,10]],"date-time":"2022-10-10T00:00:00Z","timestamp":1665360000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2022,10,10]]},"DOI":"10.1145\/3551349.3560414","type":"proceedings-article","created":{"date-parts":[[2023,1,5]],"date-time":"2023-01-05T20:43:54Z","timestamp":1672951434000},"page":"1-12","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":10,"title":["A Transferable Time Series Forecasting Service Using Deep Transformer Model for Online Systems"],"prefix":"10.1145","author":[{"given":"Tao","family":"Huang","sequence":"first","affiliation":[{"name":"Tencent, China"}]},{"given":"Pengfei","family":"Chen","sequence":"additional","affiliation":[{"name":"School of Data and Computer Science, Sun Yat-sen University, China"}]},{"given":"Jingrun","family":"Zhang","sequence":"additional","affiliation":[{"name":"School of Data and Computer Science, Sun Yat-sen University, China"}]},{"given":"Ruipeng","family":"Li","sequence":"additional","affiliation":[{"name":"Tencent, China"}]},{"given":"Rui","family":"Wang","sequence":"additional","affiliation":[{"name":"Tencent, China"}]}],"member":"320","published-online":{"date-parts":[[2023,1,5]]},"reference":[{"key":"e_1_3_2_1_1_1","doi-asserted-by":"publisher","DOI":"10.1109\/UKSim.2014.67"},{"key":"e_1_3_2_1_2_1","volume-title":"Longformer: The long-document transformer. arXiv preprint arXiv:2004.05150(2020).","author":"Beltagy Iz","year":"2020","unstructured":"Iz Beltagy, Matthew\u00a0E Peters, and Arman Cohan. 2020. Longformer: The long-document transformer. arXiv preprint arXiv:2004.05150(2020)."},{"key":"e_1_3_2_1_3_1","doi-asserted-by":"publisher","DOI":"10.5555\/1610075.1610094"},{"key":"e_1_3_2_1_4_1","volume-title":"Time Series Analysis: Forecasting and Control","author":"Box P.","unstructured":"G.\u00a0E.\u00a0P. Box and G.\u00a0M. Jenkins. 1994. Time Series Analysis: Forecasting and Control. Prentice Hall."},{"key":"e_1_3_2_1_5_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.neucom.2020.03.011"},{"key":"e_1_3_2_1_6_1","unstructured":"Rewon Child Scott Gray Alec Radford and Ilya Sutskever. 2019. Generating long sequences with sparse transformers."},{"key":"e_1_3_2_1_7_1","unstructured":"Hal Daum\u00e9\u00a0III. 2009. Frustratingly easy domain adaptation. arXiv preprint arXiv:0907.1815(2009)."},{"key":"e_1_3_2_1_8_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.rser.2017.02.085"},{"key":"e_1_3_2_1_9_1","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2011.2178556"},{"key":"e_1_3_2_1_10_1","first-page":"1","article-title":"A transformer self-attention model for time series forecasting","volume":"9","author":"Farsani R\u00a0Mohammdi","year":"2021","unstructured":"R\u00a0Mohammdi Farsani and E Pazouki. 2021. A transformer self-attention model for time series forecasting. Journal of Electrical and Computer Engineering Innovations (JECEI) 9, 1(2021), 1\u201310.","journal-title":"Journal of Electrical and Computer Engineering Innovations (JECEI)"},{"key":"e_1_3_2_1_11_1","volume-title":"Design science in information systems research. MIS quarterly","author":"Hevner R","year":"2004","unstructured":"Alan\u00a0R Hevner, Salvatore\u00a0T March, Jinsoo Park, and Sudha Ram. 2004. Design science in information systems research. MIS quarterly (2004), 75\u2013105."},{"key":"e_1_3_2_1_12_1","volume-title":"Correcting sample selection bias by unlabeled data. Advances in neural information processing systems 19","author":"Huang Jiayuan","year":"2006","unstructured":"Jiayuan Huang, Arthur Gretton, Karsten Borgwardt, Bernhard Sch\u00f6lkopf, and Alex Smola. 2006. Correcting sample selection bias by unlabeled data. Advances in neural information processing systems 19 (2006)."},{"key":"e_1_3_2_1_13_1","doi-asserted-by":"publisher","DOI":"10.1145\/3485447.3511984"},{"key":"e_1_3_2_1_14_1","volume-title":"Forecasting with exponential smoothing: the state space approach","author":"Hyndman Rob","unstructured":"Rob Hyndman, Anne\u00a0B Koehler, J\u00a0Keith Ord, and Ralph\u00a0D Snyder. 2008. Forecasting with exponential smoothing: the state space approach. Springer Science & Business Media."},{"key":"e_1_3_2_1_15_1","doi-asserted-by":"publisher","DOI":"10.1073\/pnas.1611835114"},{"key":"e_1_3_2_1_16_1","volume-title":"Reformer: The efficient transformer.","author":"Kitaev Nikita","year":"2020","unstructured":"Nikita Kitaev, \u0141ukasz Kaiser, and Anselm Levskaya. 2020. Reformer: The efficient transformer."},{"key":"e_1_3_2_1_17_1","first-page":"5243","article-title":"Enhancing the locality and breaking the memory bottleneck of transformer on time series forecasting","volume":"32","author":"Li Shiyang","year":"2019","unstructured":"Shiyang Li, Xiaoyong Jin, Yao Xuan, Xiyou Zhou, Wenhu Chen, Yu-Xiang Wang, and Xifeng Yan. 2019. Enhancing the locality and breaking the memory bottleneck of transformer on time series forecasting. Advances in Neural Information Processing Systems 32 (2019), 5243\u20135253.","journal-title":"Advances in Neural Information Processing Systems"},{"key":"e_1_3_2_1_18_1","volume-title":"Learning without forgetting","author":"Li Zhizhong","year":"2017","unstructured":"Zhizhong Li and Derek Hoiem. 2017. Learning without forgetting. IEEE transactions on pattern analysis and machine intelligence 40, 12(2017), 2935\u20132947."},{"key":"e_1_3_2_1_19_1","article-title":"Time-series forecasting with deep learning: a survey","volume":"379","author":"Lim Bryan","year":"2021","unstructured":"Bryan Lim and Stefan Zohren. 2021. Time-series forecasting with deep learning: a survey. Philosophical Transactions of the Royal Society A 379, 2194(2021), 20200209.","journal-title":"Philosophical Transactions of the Royal Society A"},{"key":"e_1_3_2_1_20_1","volume-title":"Gradient episodic memory for continual learning. Advances in neural information processing systems 30","author":"Lopez-Paz David","year":"2017","unstructured":"David Lopez-Paz and Marc\u2019Aurelio Ranzato. 2017. Gradient episodic memory for continual learning. Advances in neural information processing systems 30 (2017)."},{"key":"e_1_3_2_1_21_1","doi-asserted-by":"publisher","DOI":"10.1109\/ISSRE.2018.00013"},{"key":"e_1_3_2_1_22_1","doi-asserted-by":"publisher","DOI":"10.1149\/2.0222003JES"},{"key":"e_1_3_2_1_23_1","volume-title":"Trend analysis of climate time series: A review of methods. Earth-science reviews 190","author":"Mudelsee Manfred","year":"2019","unstructured":"Manfred Mudelsee. 2019. Trend analysis of climate time series: A review of methods. Earth-science reviews 190 (2019), 310\u2013322."},{"key":"e_1_3_2_1_24_1","doi-asserted-by":"publisher","DOI":"10.1049\/iet-its.2018.0064"},{"key":"e_1_3_2_1_25_1","volume-title":"Summer school on machine learning","author":"Rasmussen Carl\u00a0Edward","unstructured":"Carl\u00a0Edward Rasmussen. 2003. Gaussian processes in machine learning. In Summer school on machine learning. Springer, 63\u201371."},{"key":"e_1_3_2_1_26_1","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2017.587"},{"key":"e_1_3_2_1_27_1","doi-asserted-by":"publisher","DOI":"10.1016\/j.ijforecast.2019.07.001"},{"key":"e_1_3_2_1_28_1","doi-asserted-by":"publisher","DOI":"10.1142\/S0129065704001899"},{"key":"e_1_3_2_1_29_1","volume-title":"Financial time series forecasting with deep learning: A systematic literature review: 2005\u20132019. Applied soft computing 90","author":"Sezer Omer\u00a0Berat","year":"2020","unstructured":"Omer\u00a0Berat Sezer, Mehmet\u00a0Ugur Gudelek, and Ahmet\u00a0Murat Ozbayoglu. 2020. Financial time series forecasting with deep learning: A systematic literature review: 2005\u20132019. Applied soft computing 90 (2020), 106181."},{"key":"e_1_3_2_1_30_1","doi-asserted-by":"publisher","DOI":"10.1109\/ICMLA.2018.00227"},{"key":"e_1_3_2_1_31_1","doi-asserted-by":"publisher","DOI":"10.1145\/1401890.1401969"},{"key":"e_1_3_2_1_32_1","doi-asserted-by":"publisher","DOI":"10.1007\/s10463-008-0197-x"},{"key":"e_1_3_2_1_33_1","doi-asserted-by":"publisher","DOI":"10.1080\/00031305.2017.1380080"},{"key":"e_1_3_2_1_34_1","volume-title":"Attention is all you need. Advances in neural information processing systems 30","author":"Vaswani Ashish","year":"2017","unstructured":"Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan\u00a0N Gomez, \u0141ukasz Kaiser, and Illia Polosukhin. 2017. Attention is all you need. Advances in neural information processing systems 30 (2017)."},{"key":"e_1_3_2_1_35_1","volume-title":"Softly associative transfer learning for cross-domain classification","author":"Wang Deqing","year":"2019","unstructured":"Deqing Wang, Chenwei Lu, Junjie Wu, Hongfu Liu, Wenjie Zhang, Fuzhen Zhuang, and Hui Zhang. 2019. Softly associative transfer learning for cross-domain classification. IEEE transactions on cybernetics 50, 11 (2019), 4709\u20134721."},{"key":"e_1_3_2_1_36_1","doi-asserted-by":"publisher","DOI":"10.1109\/ICDM.2017.150"},{"key":"e_1_3_2_1_37_1","volume-title":"Linformer: Self-attention with linear complexity. arXiv preprint arXiv:2006.04768(2020).","author":"Wang Sinong","year":"2020","unstructured":"Sinong Wang, Belinda\u00a0Z Li, Madian Khabsa, Han Fang, and Hao Ma. 2020. Linformer: Self-attention with linear complexity. arXiv preprint arXiv:2006.04768(2020)."},{"key":"e_1_3_2_1_38_1","doi-asserted-by":"publisher","DOI":"10.1007\/978-1-4615-4625-2"},{"key":"e_1_3_2_1_39_1","unstructured":"Neo Wu Bradley Green Xue Ben and Shawn O\u2019Banion. 2020. Deep transformer models for time series forecasting: The influenza prevalence case. arXiv preprint arXiv:2001.08317(2020)."},{"key":"e_1_3_2_1_40_1","doi-asserted-by":"publisher","DOI":"10.1145\/3394486.3403118"},{"key":"e_1_3_2_1_41_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i12.17325"}],"event":{"name":"ASE '22: 37th IEEE\/ACM International Conference on Automated Software Engineering","location":"Rochester MI USA","acronym":"ASE '22"},"container-title":["Proceedings of the 37th IEEE\/ACM International Conference on Automated Software Engineering"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3551349.3560414","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3551349.3560414","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,8,22]],"date-time":"2025-08-22T07:57:39Z","timestamp":1755849459000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3551349.3560414"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2022,10,10]]},"references-count":41,"alternative-id":["10.1145\/3551349.3560414","10.1145\/3551349"],"URL":"https:\/\/doi.org\/10.1145\/3551349.3560414","relation":{},"subject":[],"published":{"date-parts":[[2022,10,10]]},"assertion":[{"value":"2023-01-05","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}