{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,11]],"date-time":"2026-04-11T13:11:20Z","timestamp":1775913080035,"version":"3.50.1"},"reference-count":30,"publisher":"IEEE","license":[{"start":{"date-parts":[[2024,12,15]],"date-time":"2024-12-15T00:00:00Z","timestamp":1734220800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,12,15]],"date-time":"2024-12-15T00:00:00Z","timestamp":1734220800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024,12,15]]},"DOI":"10.1109\/bigdata62323.2024.10826009","type":"proceedings-article","created":{"date-parts":[[2025,1,16]],"date-time":"2025-01-16T18:31:23Z","timestamp":1737052283000},"page":"1428-1433","source":"Crossref","is-referenced-by-count":2,"title":["LLM4cast: Repurposed LLM for Viral Disease Forecasting"],"prefix":"10.1109","author":[{"given":"Farah","family":"Saeed","sequence":"first","affiliation":[{"name":"University of Georgia,School of Computing,Athens,GA,USA"}]},{"given":"Mohammed","family":"Aldosari","sequence":"additional","affiliation":[{"name":"University of Georgia,School of Computing,Athens,GA,USA"}]},{"given":"Ismailcem Budak","family":"Arpinar","sequence":"additional","affiliation":[{"name":"University of Georgia,School of Computing,Athens,GA,USA"}]},{"given":"John A","family":"Miller","sequence":"additional","affiliation":[{"name":"University of Georgia,School of Computing,Athens,GA,USA"}]}],"member":"263","reference":[{"key":"ref1","article-title":"Chronos: Learning the language of time series","author":"Ansari","year":"2024"},{"key":"ref2","article-title":"Parameter efficient fine tuning: A comprehensive analysis across applications","author":"Sai Balne","year":"2024"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1093\/oso\/9780198538493.001.0001"},{"key":"ref4","article-title":"Llm4ts: Two-stage fine-tuning for time-series forecasting with pre-trained llms","author":"Chang","year":"2023"},{"key":"ref5","article-title":"A decoder-only foundation model for time-series forecasting","author":"Das","year":"2023"},{"key":"ref6","article-title":"An image is worth 16x16 words: Transformers for image recognition at scale","author":"Dosovitskiy","year":"2020"},{"key":"ref7","article-title":"Timegpt-1","author":"Garza","year":"2023"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1145\/3581783.3612348"},{"key":"ref9","first-page":"5549","article-title":"Tabllm: Few-shot classification of tabular data with large language models","volume-title":"International Conference on Artificial Intelligence and Statistics","author":"Hegselmann"},{"key":"ref10","article-title":"Lora: Low-rank adaptation of large language models","author":"Hu","year":"2021"},{"key":"ref11","article-title":"Time-llm: Time series forecasting by reprogramming large language models","author":"Jin","year":"2023"},{"key":"ref12","article-title":"Reversible instance normalization for accurate time-series forecasting against distribution shift","volume-title":"International Conference on Learning Representations","author":"Kim"},{"key":"ref13","article-title":"Dora: Weight-decomposed low-rank adaptation","author":"Liu","year":"2024"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1145\/3589334.3645434"},{"key":"ref15","article-title":"Pre-trained transformers as universal computation engines","author":"Lu","year":"2021"},{"key":"ref16","article-title":"A time series is worth 64 words: Long-term forecasting with transformers","author":"Nie","year":"2022"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1109\/IJCNN.2010.5596746"},{"issue":"8","key":"ref18","first-page":"9","article-title":"Language models are unsupervised multitask learners","volume":"1","author":"Radford","year":"2019","journal-title":"OpenAI blog"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1017\/CBO9781107298019"},{"key":"ref20","article-title":"Llama: Open and efficient foundation language models","author":"Touvron","year":"2023"},{"key":"ref21","article-title":"Unified training of universal time series forecasting transformers","author":"Woo","year":"2024"},{"key":"ref22","article-title":"Timesnet: Temporal 2d-variation modeling for general time series analysis","author":"Wu","year":"2022"},{"key":"ref23","first-page":"22419","article-title":"Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting","volume":"34","author":"Wu","year":"2021","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref24","article-title":"Reft: Representation finetuning for language models","author":"Wu","year":"2024"},{"key":"ref25","article-title":"Instruction-vit: Multi-modal prompts for instruction learning in vit","author":"Xiao","year":"2023"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.1109\/TKDE.2023.3342137"},{"key":"ref27","article-title":"Llama-adapter: Efficient fine-tuning of language models with zero-init attention","author":"Zhang","year":"2023"},{"key":"ref28","first-page":"27268","article-title":"Fedformer: Frequency enhanced decomposed transformer for long-term series forecasting","volume-title":"International Conference on Machine Learning","author":"Zhou"},{"key":"ref29","first-page":"36","article-title":"One fits all: Power general time series analysis by pretrained lm","author":"Zhou","year":"2024","journal-title":"Advances in neural information processing systems"},{"key":"ref30","article-title":"One fits all: Universal time series analysis by pretrained lm and specially designed adaptors","author":"Zhou","year":"2023"}],"event":{"name":"2024 IEEE International Conference on Big Data (BigData)","location":"Washington, DC, USA","start":{"date-parts":[[2024,12,15]]},"end":{"date-parts":[[2024,12,18]]}},"container-title":["2024 IEEE International Conference on Big Data (BigData)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/10824975\/10824942\/10826009.pdf?arnumber=10826009","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,1,17]],"date-time":"2025-01-17T07:48:54Z","timestamp":1737100134000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10826009\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,12,15]]},"references-count":30,"URL":"https:\/\/doi.org\/10.1109\/bigdata62323.2024.10826009","relation":{},"subject":[],"published":{"date-parts":[[2024,12,15]]}}}