{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,2]],"date-time":"2026-03-02T19:17:02Z","timestamp":1772479022109,"version":"3.50.1"},"publisher-location":"New York, NY, USA","reference-count":10,"publisher":"ACM","license":[{"start":{"date-parts":[[2023,11,15]],"date-time":"2023-11-15T00:00:00Z","timestamp":1700006400000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.acm.org\/publications\/policies\/copyright_policy#Background"}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2023,11,15]]},"DOI":"10.1145\/3600100.3623730","type":"proceedings-article","created":{"date-parts":[[2023,11,3]],"date-time":"2023-11-03T12:17:16Z","timestamp":1699013836000},"page":"224-227","update-policy":"https:\/\/doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":15,"title":["Utilizing Language Models for Energy Load Forecasting"],"prefix":"10.1145","author":[{"ORCID":"https:\/\/orcid.org\/0000-0003-1700-9215","authenticated-orcid":false,"given":"Hao","family":"Xue","sequence":"first","affiliation":[{"name":"University of New South Wales, Australia"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-1237-1664","authenticated-orcid":false,"given":"Flora D.","family":"Salim","sequence":"additional","affiliation":[{"name":"University of New South Wales, Australia"}]}],"member":"320","published-online":{"date-parts":[[2023,11,15]]},"reference":[{"key":"e_1_3_2_1_1_1","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-main.703"},{"key":"e_1_3_2_1_2_1","volume-title":"Attention is all you need. Advances in neural information processing systems 30","author":"Vaswani Ashish","year":"2017","unstructured":"Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan\u00a0N Gomez, \u0141ukasz Kaiser, and Illia Polosukhin. 2017. Attention is all you need. Advances in neural information processing systems 30 (2017)."},{"key":"e_1_3_2_1_3_1","volume-title":"Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting. Advances in Neural Information Processing Systems 34","author":"Xu Jiehui","year":"2021","unstructured":"Jiehui Xu, Jianmin Wang, Mingsheng Long, 2021. Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting. Advances in Neural Information Processing Systems 34 (2021)."},{"key":"e_1_3_2_1_4_1","unstructured":"Hao Xue and Flora\u00a0D. Salim. 2023. PromptCast: A New Prompt-based Learning Paradigm for Time Series Forecasting. arxiv:2210.08964"},{"key":"e_1_3_2_1_5_1","doi-asserted-by":"publisher","DOI":"10.1145\/3488560.3498387"},{"key":"e_1_3_2_1_6_1","doi-asserted-by":"publisher","DOI":"10.1145\/3557915.3561026"},{"key":"e_1_3_2_1_7_1","first-page":"17283","article-title":"Big bird: Transformers for longer sequences","volume":"33","author":"Zaheer Manzil","year":"2020","unstructured":"Manzil Zaheer, Guru Guruganesh, Kumar\u00a0Avinava Dubey, Joshua Ainslie, Chris Alberti, Santiago Ontanon, Philip Pham, Anirudh Ravula, Qifan Wang, Li Yang, 2020. Big bird: Transformers for longer sequences. Advances in Neural Information Processing Systems 33 (2020), 17283\u201317297.","journal-title":"Advances in Neural Information Processing Systems"},{"key":"e_1_3_2_1_8_1","volume-title":"International Conference on Machine Learning. 
PMLR, 11328\u201311339","author":"Zhang Jingqing","year":"2020","unstructured":"Jingqing Zhang, Yao Zhao, Mohammad Saleh, and Peter Liu. 2020. Pegasus: Pre-training with extracted gap-sentences for abstractive summarization. In International Conference on Machine Learning. PMLR, 11328\u201311339."},{"key":"e_1_3_2_1_9_1","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i12.17325"},{"key":"e_1_3_2_1_10_1","volume-title":"FEDformer: Frequency Enhanced Decomposed Transformer for Long-term Series Forecasting. In International Conference on Machine Learning, ICML 2022","author":"Zhou Tian","year":"2022","unstructured":"Tian Zhou, Ziqing Ma, Qingsong Wen, Xue Wang, Liang Sun, and Rong Jin. 2022. FEDformer: Frequency Enhanced Decomposed Transformer for Long-term Series Forecasting. In International Conference on Machine Learning, ICML 2022, 17-23 July 2022, Baltimore, Maryland, USA(Proceedings of Machine Learning Research, Vol.\u00a0162). PMLR, 27268\u201327286."}],"event":{"name":"BuildSys '23: The 10th ACM International Conference on Systems for Energy-Efficient Buildings, Cities, and Transportation","location":"Istanbul Turkey","acronym":"BuildSys '23"},"container-title":["Proceedings of the 10th ACM International Conference on Systems for Energy-Efficient Buildings, Cities, and Transportation"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3600100.3623730","content-type":"unspecified","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3600100.3623730","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,8,22]],"date-time":"2025-08-22T21:31:28Z","timestamp":1755898288000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3600100.3623730"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2023,11,15]]},"references-count":10,"alternative-id":["10.1145\/3600100.3623730","10.1145\/3600100"],"URL":"https:\/\/doi.org\/10.1145\/3600100.3623730","relation":{},"subject":[],"published":{"date-parts":[[2023,11,15]]},"assertion":[{"value":"2023-11-15","order":3,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}