{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,5,12]],"date-time":"2026-05-12T23:10:30Z","timestamp":1778627430267,"version":"3.51.4"},"reference-count":78,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2020,1,1]],"date-time":"2020-01-01T00:00:00Z","timestamp":1577836800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/legalcode"}],"funder":[{"name":"Development Project of Leading Technology for Future Vehicle of the Business of Daegu Metropolitan City","award":["20171105"],"award-info":[{"award-number":["20171105"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Access"],"published-print":{"date-parts":[[2020]]},"DOI":"10.1109\/access.2020.2997917","type":"journal-article","created":{"date-parts":[[2020,5,27]],"date-time":"2020-05-27T20:31:59Z","timestamp":1590611519000},"page":"100857-100869","source":"Crossref","is-referenced-by-count":25,"title":["Efficient Visual Tracking With Stacked Channel-Spatial Attention Learning"],"prefix":"10.1109","volume":"8","author":[{"given":"Md. Maklachur","family":"Rahman","sequence":"first","affiliation":[]},{"given":"Mustansar","family":"Fiaz","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0239-6785","authenticated-orcid":false,"given":"Soon Ki","family":"Jung","sequence":"additional","affiliation":[]}],"member":"263","reference":[{"key":"ref73","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2011.239"},{"key":"ref72","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2014.2345390"},{"key":"ref71","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2015.2509974"},{"key":"ref70","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2016.2609928"},{"key":"ref76","doi-asserted-by":"publisher","DOI":"10.1016\/j.knosys.2016.09.014"},{"key":"ref77","first-page":"463","article-title":"Denssiam: End-to-end densely-siamese network with self-attention model for object tracking","author":"abdelpakey","year":"2018","journal-title":"Int Symp Visual Computing"},{"key":"ref74","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2017.2669880"},{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2017.531"},{"key":"ref75","article-title":"Transferring rich feature hierarchies for robust visual tracking","author":"wang","year":"2015","journal-title":"arXiv 1501 04587"},{"key":"ref38","first-page":"116","article-title":"Multiple context features in siamese networks for visual object tracking","author":"morimitsu","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref78","doi-asserted-by":"publisher","DOI":"10.1109\/ROBIO.2011.6181739"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.5772\/intechopen.86235"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.1145\/3309665"},{"key":"ref31","doi-asserted-by":"crossref","first-page":"1442","DOI":"10.1109\/TPAMI.2013.230","article-title":"Visual tracking: An experimental survey","volume":"36","author":"smeulders","year":"2014","journal-title":"IEEE Trans Pattern Anal Mach Intell"},{"key":"ref30","first-page":"445","article-title":"A benchmark and simulator for uav tracking","author":"mueller","year":"2016","journal-title":"Proc Eur Conf Comput Vis"},{"key":"ref37","first-page":"749","article-title":"Learning to track at 100 fps with deep regression networks","author":"held","year":"2016","journal-title":"Proc Eur Conf Comput Vis"},{"key":"ref36","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2015.352"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1109\/ICCVW.2015.84"},{"key":"ref34","first-page":"472","article-title":"Beyond correlation filters: Learning continuous convolution operators for visual tracking","author":"danelljan","year":"2016","journal-title":"Proc Eur Conf Comput Vis"},{"key":"ref60","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2017.279"},{"key":"ref62","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2019.2895411"},{"key":"ref61","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2015.490"},{"key":"ref63","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00140"},{"key":"ref28","first-page":"5","article-title":"The sixth visual object tracking vot2018 challenge results","author":"kristan","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref64","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2019.00100"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.1109\/ICCVW.2017.230"},{"key":"ref65","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2019.2955292"},{"key":"ref66","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2017.585"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2015.2482905"},{"key":"ref67","first-page":"188","article-title":"Meem: Robust tracking via multiple experts using entropy minimization","author":"zhang","year":"2014","journal-title":"Proc Eur Conf Comput Vis"},{"key":"ref68","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2015.7298675"},{"key":"ref2","article-title":"Computer vision for autonomous vehicles: Problems, datasets and state of the art","author":"janai","year":"2017","journal-title":"arXiv 1704 05519"},{"key":"ref69","first-page":"6","article-title":"The visual object tracking vot2014 challenge results","volume":"1","author":"kristan","year":"2014","journal-title":"Proc ECCV"},{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/EUROCON.2011.5929144"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1145\/3297280.3297416"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00508"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2015.357"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2014.2388226"},{"key":"ref23","first-page":"459","article-title":"Triplet loss in siamese network for object tracking","author":"dong","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref26","first-page":"777","article-title":"The visual object tracking vot2016 challenge results","volume":"9914","author":"kristan","year":"2016","journal-title":"Proc Eur Conf Comput Vis"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2013.312"},{"key":"ref50","doi-asserted-by":"publisher","DOI":"10.1007\/s11263-011-0495-2"},{"key":"ref51","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00326"},{"key":"ref59","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.156"},{"key":"ref58","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00935"},{"key":"ref57","doi-asserted-by":"publisher","DOI":"10.1007\/s11263-015-0816-y"},{"key":"ref56","article-title":"Got-10k: A large high-diversity benchmark for generic object tracking in the wild","author":"huang","year":"2019","journal-title":"IEEE Trans Pattern Anal Mach Intell"},{"key":"ref55","first-page":"1097","article-title":"Imagenet classification with deep convolutional neural networks","author":"krizhevsky","year":"2012","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref54","article-title":"Paying more attention to attention: Improving the performance of convolutional neural networks via attention transfer","author":"zagoruyko","year":"2016","journal-title":"arXiv 1612 03928"},{"key":"ref53","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2019.2922494"},{"key":"ref52","doi-asserted-by":"publisher","DOI":"10.1038\/nrn755"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2010.5539960"},{"key":"ref11","first-page":"702","article-title":"Exploiting the circulant structure of tracking-by-detection with kernels","author":"henriques","year":"2012","journal-title":"Proc Eur Conf Comput Vis"},{"key":"ref40","first-page":"152","article-title":"Learning dynamic memory networks for object tracking","author":"yang","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2014.143"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2015.7298965"},{"key":"ref14","first-page":"91","article-title":"Faster R-CNN: Towards real-time object detection with region proposal networks","author":"ren","year":"2015","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref15","first-page":"568","article-title":"Two-stream convolutional networks for action recognition in videos","author":"simonyan","year":"2014","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.466"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1109\/ICPR.2016.7899807"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2017.733"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.465"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/34.598226"},{"key":"ref3","doi-asserted-by":"crossref","first-page":"1704","DOI":"10.1109\/TPAMI.2012.242","article-title":"Learning to track and identify players from broadcast sports videos","volume":"35","author":"lu","year":"2013","journal-title":"IEEE Trans Pattern Anal Mach Intell"},{"key":"ref6","article-title":"DCFNet: Discriminant correlation filters network for visual tracking","author":"wang","year":"2017","journal-title":"arXiv 1704 04057"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/ICIP.2018.8451102"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1016\/j.patrec.2014.03.025"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2017.196"},{"key":"ref49","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2017.683"},{"key":"ref9","first-page":"597","article-title":"Online tracking by learning discriminative saliency map with convolutional neural network","author":"hong","year":"2015","journal-title":"Proc Int Conf Mach Learn"},{"key":"ref46","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2015.2510583"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2019.2929034"},{"key":"ref48","first-page":"3","article-title":"Cbam: Convolutional block attention module","author":"woo","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.1142\/9789812797926_0003"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.1109\/LSP.2018.2877008"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00510"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00064"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2019.2907282"}],"container-title":["IEEE Access"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/6287639\/8948470\/09102303.pdf?arnumber=9102303","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,1,27]],"date-time":"2022-01-27T20:07:48Z","timestamp":1643314068000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9102303\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2020]]},"references-count":78,"URL":"https:\/\/doi.org\/10.1109\/access.2020.2997917","relation":{},"ISSN":["2169-3536"],"issn-type":[{"value":"2169-3536","type":"electronic"}],"subject":[],"published":{"date-parts":[[2020]]}}}