{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,11,15]],"date-time":"2025-11-15T07:43:21Z","timestamp":1763192601215,"version":"3.45.0"},"reference-count":30,"publisher":"IEEE","license":[{"start":{"date-parts":[[2025,6,30]],"date-time":"2025-06-30T00:00:00Z","timestamp":1751241600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2025,6,30]],"date-time":"2025-06-30T00:00:00Z","timestamp":1751241600000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2025,6,30]]},"DOI":"10.1109\/ijcnn64981.2025.11228434","type":"proceedings-article","created":{"date-parts":[[2025,11,14]],"date-time":"2025-11-14T18:46:15Z","timestamp":1763145975000},"page":"1-7","source":"Crossref","is-referenced-by-count":0,"title":["Time2Text-PM: Reprogramming Pre-trained Models for Few-Shot Intention Recognition"],"prefix":"10.1109","author":[{"given":"Yantong","family":"Chen","sequence":"first","affiliation":[{"name":"Nanjing University of Aeronautics and Astronautics,Nanjing,China"}]},{"given":"Feng","family":"Hu","sequence":"additional","affiliation":[{"name":"Nanjing University of Aeronautics and Astronautics,Nanjing,China"}]}],"member":"263","reference":[{"issue":"4","key":"ref1","first-page":"1","article-title":"A review of research on target operational intention recognition","volume":"52","author":"ZHANG","year":"2024","journal-title":"Modern Defence Technology"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.12677\/ORF.2014.44009"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1515\/9780691214696"},{"issue":"11","key":"ref4","first-page":"195","article-title":"Combat intention recognition for aerial targets based on deep neural network","volume":"39","author":"ZHOU","year":"2018","journal-title":"Acta Aeronautica et Astronautica Sinica"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P18-1031"},{"issue":"2","key":"ref6","article-title":"Bert: Pre-training of deep bidirectional transformers for language understanding","volume-title":"Proceedings of naacL-HLT","volume":"1","author":"Kenton"},{"article-title":"Improving language understanding by generative pre-training","year":"2018","author":"Radford","key":"ref7"},{"article-title":"Beit: Bert pre-training of image transformers","year":"2021","author":"Bao","key":"ref8"},{"article-title":"Timesnet: Temporal 2d-variation modeling for general time series analysis","year":"2022","author":"Wu","key":"ref9"},{"key":"ref10","article-title":"Large language models are zero-shot time series forecasters","volume":"36","author":"Gruver","year":"2024","journal-title":"Advances in Neural Information Processing Systems"},{"key":"ref11","first-page":"43 322","article-title":"One fits all: Power general time series analysis by pretrained lm","volume":"36","author":"Zhou","year":"2023","journal-title":"Advances in neural information processing systems"},{"article-title":"Time-llm: Time series forecasting by reprogramming large language models","year":"2023","author":"Jin","key":"ref12"},{"article-title":"Tempo: Prompt-based generative pre-trained transformer for time series forecasting","year":"2023","author":"Cao","key":"ref13"},{"key":"ref14","article-title":"Lag-llama: Towards foundation models for time series forecasting","author":"Rasul","year":"2023","journal-title":"R0-FoMo: Robustness of Few-shot and Zero-shot Learning in Large Foundation Models"},{"article-title":"Moment: a family of open time-series foundation models.. 2024","year":"2024","author":"Goswami","key":"ref15"},{"issue":"3","key":"ref16","first-page":"56","article-title":"Research on intelligent recognition model of tactical intent combined with attention mechanism","volume":"39","author":"huan Zhao","year":"2021","journal-title":"Aerospace Control"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1007\/s12065-022-00728-9"},{"issue":"5","key":"ref18","first-page":"24","article-title":"Bilstm-attention: An air target tactical intention recognition model","volume":"28","author":"Teng","year":"2021","journal-title":"Aero Weaponry"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1360\/SSI-2019-0106"},{"article-title":"Reversible instance normalization for accurate time-series forecasting against distribution shift","volume-title":"International Conference on Learning Representations","author":"Kim","key":"ref20"},{"article-title":"A time series is worth 64 words: Long-term forecasting with transformers","year":"2022","author":"Nie","key":"ref21"},{"article-title":"Reprogramming under constraints: Revisiting efficient and reliable transferability of lottery tickets","year":"2023","author":"Misra","key":"ref22"},{"key":"ref23","first-page":"11 808","article-title":"Voice2series: Reprogramming acoustic models for time series classification","volume-title":"International conference on machine learning","author":"Yang"},{"issue":"8","key":"ref24","first-page":"9","article-title":"Language models are unsupervised multitask learners","volume":"1","author":"Radford","year":"2019","journal-title":"OpenAI blog"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1093\/nsr\/nwae403"},{"article-title":"Non-stationary transformers: Exploring the stationarity in time series forecasting","year":"2023","author":"Liu","key":"ref26"},{"key":"ref27","doi-asserted-by":"crossref","DOI":"10.1609\/aaai.v35i12.17325","article-title":"Informer: Beyond efficient transformer for long sequence time-series forecasting","author":"Zhou","year":"2021"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1038\/323533a0"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1162\/neco.1997.9.8.1735"},{"article-title":"Adam: A method for stochastic optimization","year":"2014","author":"Kingma","key":"ref30"}],"event":{"name":"2025 International Joint Conference on Neural Networks (IJCNN)","start":{"date-parts":[[2025,6,30]]},"location":"Rome, Italy","end":{"date-parts":[[2025,7,5]]}},"container-title":["2025 International Joint Conference on Neural Networks (IJCNN)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/11227166\/11227148\/11228434.pdf?arnumber=11228434","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,11,15]],"date-time":"2025-11-15T07:39:47Z","timestamp":1763192387000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/11228434\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,6,30]]},"references-count":30,"URL":"https:\/\/doi.org\/10.1109\/ijcnn64981.2025.11228434","relation":{},"subject":[],"published":{"date-parts":[[2025,6,30]]}}}