{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,22]],"date-time":"2026-04-22T19:45:49Z","timestamp":1776887149216,"version":"3.51.2"},"publisher-location":"New York, NY, USA","reference-count":19,"publisher":"ACM","license":[{"start":{"date-parts":[[2024,11,22]],"date-time":"2024-11-22T00:00:00Z","timestamp":1732233600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/"}],"funder":[{"name":"Prospective Foundation of Technology and Engineering Center for Space utilization, Chinese Academy of Sciences OF FUNDER","award":["T303271"],"award-info":[{"award-number":["T303271"]}]}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2024,11,22]]},"DOI":"10.1145\/3711507.3711508","type":"proceedings-article","created":{"date-parts":[[2025,4,15]],"date-time":"2025-04-15T07:33:01Z","timestamp":1744702381000},"page":"1-9","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":5,"title":["PatchTCN: Patch-Based Transformer Convolutional Network for Times Series Analysis"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-1405-4603","authenticated-orcid":false,"given":"Jian","family":"Zhang","sequence":"first","affiliation":[{"name":"Key Laboratory of Space Utilization, Technology and Engineering Center for Space Utilization, Chinese Academy of Sciences University of Chinese Academy of Sciences, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-6642-4928","authenticated-orcid":false,"given":"Lili","family":"Guo","sequence":"additional","affiliation":[{"name":"Key Laboratory of Space Utilization, Technology and Engineering Center for Space Utilization, Chinese Academy of Sciences, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-1609-2192","authenticated-orcid":false,"given":"Lei","family":"Song","sequence":"additional","affiliation":[{"name":"Key Laboratory of Space Utilization, Technology and Engineering Center for Space Utilization, Chinese Academy of Sciences, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0009-0009-0965-7221","authenticated-orcid":false,"given":"Song","family":"Gao","sequence":"additional","affiliation":[{"name":"Key Laboratory of Space Utilization, Technology and Engineering Center for Space Utilization, Chinese Academy of Sciences, Beijing, China"}]},{"ORCID":"https:\/\/orcid.org\/0009-0003-7252-8015","authenticated-orcid":false,"given":"Chuanzhu","family":"Hao","sequence":"additional","affiliation":[{"name":"College of Electrical Engineering Shandong Huayu University of Technology, Shandong, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-3731-1251","authenticated-orcid":false,"given":"Xuzhi","family":"Li","sequence":"additional","affiliation":[{"name":"Key Laboratory of Space Utilization, Technology and Engineering Center for Space Utilization, Chinese Academy of Sciences, Beijing, China"}]}],"member":"320","published-online":{"date-parts":[[2025,4,15]]},"reference":[{"key":"e_1_3_3_1_1_2","volume-title":"Proceedings of the The Twelfth International Conference on Learning Representations","author":"Luo D.","year":"2024","unstructured":"Luo, D.; Wang, X. ModernTCN: A modern pure convolution structure for general time series analysis. In Proceedings of the The Twelfth International Conference on Learning Representations, 2024."},{"key":"e_1_3_3_1_2_2","volume-title":"A time series is worth 64 words: Long-term forecasting with transformers. arXiv preprint arXiv:2211.14730","author":"Nie Y.","year":"2022","unstructured":"Nie, Y.; Nguyen, N.H.; Sinthong, P.; Kalagnanam, J. A time series is worth 64 words: Long-term forecasting with transformers. arXiv preprint arXiv:2211.14730 2022."},{"key":"e_1_3_3_1_3_2","volume-title":"Proceedings of the International Conference on Learning Representations","author":"Wu H.","year":"2023","unstructured":"Wu, H.; Hu, T.; Liu, Y.; Zhou, H.; Wang, J.; Long, M. TimesNet: Temporal 2D-Variation Modeling for General Time Series Analysis. In Proceedings of the International Conference on Learning Representations, 2023."},{"key":"e_1_3_3_1_4_2","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i12.17325"},{"key":"e_1_3_3_1_5_2","volume-title":"Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting. Advances in neural information processing systems","author":"Wu H.","year":"2021","unstructured":"Wu, H.; Xu, J.; Wang, J.; Long, M. Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting. Advances in neural information processing systems 2021, 34, 22419-22430."},{"key":"e_1_3_3_1_6_2","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-319-49409-8_7"},{"key":"e_1_3_3_1_7_2","first-page":"491","volume":"2020","author":"Chen Y.","unstructured":"Chen, Y.; Kang, Y.; Chen, Y.; Wang, Z. Probabilistic forecasting with temporal convolutional neural network. Neurocomputing 2020, 399, 491-501.","journal-title":"Neurocomputing"},{"key":"e_1_3_3_1_8_2","volume-title":"Proceedings of the The Eleventh International Conference on Learning Representations","author":"Wang H.","year":"2022","unstructured":"Wang, H.; Peng, J.; Huang, F.; Wang, J.; Chen, J.; Xiao, Y. Micn: Multi-scale local and global context modeling for long-term series forecasting. In Proceedings of the The Eleventh International Conference on Learning Representations, 2022."},{"key":"e_1_3_3_1_9_2","volume-title":"Proceedings of the The eleventh international conference on learning representations","author":"Wu H.","year":"2022","unstructured":"Wu, H.; Hu, T.; Liu, Y.; Zhou, H.; Wang, J.; Long, M. Timesnet: Temporal 2d-variation modeling for general time series analysis. In Proceedings of the The eleventh international conference on learning representations, 2022."},{"key":"e_1_3_3_1_10_2","volume-title":"Attention is all you need. Advances in neural information processing systems","author":"Vaswani A.","year":"2017","unstructured":"Vaswani, A.; Shazeer, N.; Parmar, N.; Uszkoreit, J.; Jones, L.; Gomez, A.N.; Kaiser, \u0141.; Polosukhin, I. Attention is all you need. Advances in neural information processing systems 2017, 30."},{"key":"e_1_3_3_1_11_2","volume-title":"Hierarchical transformers for multi-document summarization. arXiv preprint arXiv:1905.13164","author":"Liu Y.","year":"2019","unstructured":"Liu, Y.; Lapata, M. Hierarchical transformers for multi-document summarization. arXiv preprint arXiv:1905.13164 2019."},{"key":"e_1_3_3_1_12_2","volume-title":"Reformer: The efficient transformer. arXiv preprint arXiv:2001.04451","author":"Kitaev N.","year":"2020","unstructured":"Kitaev, N.; Kaiser, \u0141.; Levskaya, A. Reformer: The efficient transformer. arXiv preprint arXiv:2001.04451 2020."},{"key":"e_1_3_3_1_13_2","volume-title":"Proceedings of the International conference on learning representations","author":"Liu S.","year":"2021","unstructured":"Liu, S.; Yu, H.; Liao, C.; Li, J.; Lin, W.; Liu, A.X.; Dustdar, S. Pyraformer: Low-complexity pyramidal attention for long-range time series modeling and forecasting. In Proceedings of the International conference on learning representations, 2021."},{"key":"e_1_3_3_1_14_2","first-page":"27268","volume-title":"Proceedings of the International conference on machine learning. PMLR","author":"Zhou T.","year":"2022","unstructured":"Zhou, T.; Ma, Z.; Wen, Q.; Wang, X.; Sun, L.; Jin, R. Fedformer: Frequency enhanced decomposed transformer for long-term series forecasting. In Proceedings of the International conference on machine learning. PMLR, 2022, pp. 27268-27286."},{"key":"e_1_3_3_1_15_2","first-page":"9881","volume":"2022","author":"Liu Y.","unstructured":"Liu, Y.; Wu, H.; Wang, J.; Long, M. Non-stationary transformers: Exploring the stationarity in time series forecasting. Advances in Neural Information Processing Systems 2022, 35, 9881-9893.","journal-title":"Advances in Neural Information Processing Systems"},{"key":"e_1_3_3_1_16_2","volume-title":"Etsformer: Exponential smoothing transformers for time-series forecasting. arXiv preprint arXiv:2202.01381","author":"Woo G.","year":"2022","unstructured":"Woo, G.; Liu, C.; Sahoo, D.; Kumar, A.; Hoi, S. Etsformer: Exponential smoothing transformers for time-series forecasting. arXiv preprint arXiv:2202.01381 2022."},{"key":"e_1_3_3_1_17_2","volume-title":"Proceedings of the The eleventh international conference on learning representations","author":"Zhang Y.","year":"2022","unstructured":"Zhang, Y.; Yan, J. Crossformer: Transformer utilizing cross-dimension dependency for multivariate time series forecasting. In Proceedings of the The eleventh international conference on learning representations, 2022."},{"key":"e_1_3_3_1_18_2","volume-title":"One fits all: Power general time series analysis by pretrained lm. Advances in neural information processing systems","author":"Zhou T.","year":"2024","unstructured":"Zhou, T.; Niu, P.; et al. One fits all: Power general time series analysis by pretrained lm. Advances in neural information processing systems 2024, 36."},{"key":"e_1_3_3_1_19_2","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v37i9.26317"}],"event":{"name":"ISCAI 2024: 2024 3rd International Symposium on Computing and Artificial Intelligence","location":"Dali China","acronym":"ISCAI 2024"},"container-title":["Proceedings of the 2024 3rd International Symposium on Computing and Artificial Intelligence"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3711507.3711508","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3711507.3711508","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,6,19]],"date-time":"2025-06-19T01:18:28Z","timestamp":1750295908000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3711507.3711508"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,11,22]]},"references-count":19,"alternative-id":["10.1145\/3711507.3711508","10.1145\/3711507"],"URL":"https:\/\/doi.org\/10.1145\/3711507.3711508","relation":{},"subject":[],"published":{"date-parts":[[2024,11,22]]},"assertion":[{"value":"2025-04-15","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}