{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,11,25]],"date-time":"2025-11-25T06:56:00Z","timestamp":1764053760981,"version":"3.28.0"},"reference-count":31,"publisher":"IEEE","license":[{"start":{"date-parts":[[2021,10,6]],"date-time":"2021-10-06T00:00:00Z","timestamp":1633478400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2021,10,6]],"date-time":"2021-10-06T00:00:00Z","timestamp":1633478400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2021,10,6]]},"DOI":"10.1109\/dsaa53316.2021.9564126","type":"proceedings-article","created":{"date-parts":[[2021,10,20]],"date-time":"2021-10-20T22:54:28Z","timestamp":1634770468000},"page":"1-12","source":"Crossref","is-referenced-by-count":8,"title":["Constructing Global Coherence Representations: Identifying Interpretability and Coherences of Transformer Attention in Time Series Data"],"prefix":"10.1109","author":[{"given":"Leonid","family":"Schwenke","sequence":"first","affiliation":[{"name":"Semantic Information Systems Group (SIS), Osnabr&#x00FC;ck University,Osnabr&#x00FC;ck,Germany"}]},{"given":"Martin","family":"Atzmueller","sequence":"additional","affiliation":[{"name":"Semantic Information Systems Group (SIS), Osnabr&#x00FC;ck University,Osnabr&#x00FC;ck,Germany"}]}],"member":"263","reference":[{"key":"ref31","article-title":"Pretrained transformers as universal computation engines","author":"lu","year":"2021","journal-title":"ArXiv Preprint"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1161\/01.CIR.101.23.e215"},{"key":"ref10","article-title":"Enhancing the locality and breaking the memory bottleneck of transformer on time series forecasting","author":"li","year":"2019","journal-title":"ArXiv Preprint"},{"key":"ref11","article-title":"Hopfield networks is all you need","author":"ramsauer","year":"2020","journal-title":"ArXiv Preprint"},{"key":"ref12","article-title":"An image is worth 16x16 words: Transformers for image recognition at scale","author":"dosovitskiy","year":"2020","journal-title":"ArXiv Preprint"},{"key":"ref13","article-title":"Visualizing attention in transformer-based language representation models","author":"vig","year":"2019","journal-title":"ArXiv Preprint"},{"key":"ref14","article-title":"Attviz: Online exploration of self-attention for transparent neural language modeling","author":"\u0161krlj","year":"2020","journal-title":"ArXiv Preprint"},{"key":"ref15","first-page":"270","article-title":"Visualizing transformers for nlp: a brief survey","author":"bra\u0219oveanu","year":"0","journal-title":"2020 24th International Conference Information Visualisation (IV)"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/W19-4828"},{"key":"ref17","article-title":"Interpretable machine learning: A brief survey from the predictive maintenance perspective","author":"vollert","year":"0","journal-title":"Proc of IEEE Conference on Emerging Technologies and Factory Automation (ETFA)"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.319"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1038\/s41524-019-0196-x"},{"key":"ref28","first-page":"27","article-title":"Time-series similarity queries employing a feature-based approach","author":"alcock","year":"0","journal-title":"7th Hellenic Conference on Informatics"},{"key":"ref4","article-title":"Understanding multi-head attention in abstractive summarization","author":"baan","year":"2019","journal-title":"ArXiv Preprint"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.acl-main.385"},{"key":"ref3","article-title":"An attentive survey of attention models","author":"chaudhari","year":"2019","journal-title":"ArXiv Preprint"},{"key":"ref6","doi-asserted-by":"crossref","first-page":"832","DOI":"10.3390\/electronics8080832","article-title":"Machine learning interpretability: A survey on methods and metrics","volume":"8","author":"carvalho","year":"2019","journal-title":"Electronics"},{"journal-title":"The UCR time series classification archive","year":"2018","author":"dau","key":"ref29"},{"key":"ref5","article-title":"Explainable artificial intelligence (xai) on timeseries data: A survey","author":"rojat","year":"2021","journal-title":"ArXiv Preprint"},{"key":"ref8","article-title":"Show me what you're looking for: Visualizing abstracted transformer attention for enhancing their local interpretability on time series data","author":"schwenke","year":"2021","journal-title":"Proc 34th International Florida Artificial Intelligence Research Society Conference (FLAIRS-2021)"},{"key":"ref7","article-title":"Multi-head attention: Collaborate instead of concatenate","author":"cordonnier","year":"2020","journal-title":"ArXiv Preprint"},{"key":"ref2","article-title":"Efficient transformers: A survey","author":"tay","year":"2020","journal-title":"ArXiv Preprint"},{"key":"ref9","article-title":"Temporal fusion transformers for interpretable multi-horizon time series forecasting","author":"lim","year":"2019","journal-title":"ArXiv Preprint"},{"key":"ref1","article-title":"Attention is all you need","author":"vaswani","year":"2017","journal-title":"ArXiv Preprint"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1282"},{"key":"ref22","article-title":"Language models are open knowledge graphs","author":"wang","year":"2020","journal-title":"ArXiv Preprint"},{"key":"ref21","article-title":"Learning to deceive with attention-based explanations","author":"pruthi","year":"2019","journal-title":"ArXiv Preprint"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1007\/s10618-007-0064-z"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1145\/882085.882086"},{"key":"ref26","article-title":"On identifiability in transformers","author":"brunner","year":"2019","journal-title":"ArXiv Preprint"},{"key":"ref25","article-title":"Api design for machine learning software: experiences from the scikit-learn project","author":"buitinck","year":"2013","journal-title":"ArXiv Preprint"}],"event":{"name":"2021 IEEE 8th International Conference on Data Science and Advanced Analytics (DSAA)","start":{"date-parts":[[2021,10,6]]},"location":"Porto, Portugal","end":{"date-parts":[[2021,10,9]]}},"container-title":["2021 IEEE 8th International Conference on Data Science and Advanced Analytics (DSAA)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/9564091\/9564109\/09564126.pdf?arnumber=9564126","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,8,2]],"date-time":"2022-08-02T23:37:33Z","timestamp":1659483453000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9564126\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021,10,6]]},"references-count":31,"URL":"https:\/\/doi.org\/10.1109\/dsaa53316.2021.9564126","relation":{},"subject":[],"published":{"date-parts":[[2021,10,6]]}}}