{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,8]],"date-time":"2026-04-08T10:45:51Z","timestamp":1775645151689,"version":"3.50.1"},"reference-count":59,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2020,1,1]],"date-time":"2020-01-01T00:00:00Z","timestamp":1577836800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2020,1,1]],"date-time":"2020-01-01T00:00:00Z","timestamp":1577836800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2020,1,1]],"date-time":"2020-01-01T00:00:00Z","timestamp":1577836800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["61972012"],"award-info":[{"award-number":["61972012"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. on Image Process."],"published-print":{"date-parts":[[2020]]},"DOI":"10.1109\/tip.2020.2982828","type":"journal-article","created":{"date-parts":[[2020,3,30]],"date-time":"2020-03-30T22:42:45Z","timestamp":1585608165000},"page":"5259-5272","source":"Crossref","is-referenced-by-count":155,"title":["Gaze Estimation by Exploring Two-Eye Asymmetry"],"prefix":"10.1109","volume":"29","author":[{"ORCID":"https:\/\/orcid.org\/0000-0003-1353-9817","authenticated-orcid":false,"given":"Yihua","family":"Cheng","sequence":"first","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0002-8368-3542","authenticated-orcid":false,"given":"Xucong","family":"Zhang","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9064-7964","authenticated-orcid":false,"given":"Feng","family":"Lu","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0097-4537","authenticated-orcid":false,"given":"Yoichi","family":"Sato","sequence":"additional","affiliation":[]}],"member":"263","reference":[{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2014.2313123"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.5244\/C.12.43"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.1145\/2638728.2641694"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2014.229"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2011.2162740"},{"key":"ref30","first-page":"159","article-title":"Point of gaze estimation through corneal surface reflection in an active illumination environment","author":"nakazawa","year":"2012","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref37","first-page":"753","article-title":"Non-intrusive gaze tracking using artificial neural networks","author":"baluja","year":"1994","journal-title":"Proc Annu Conf Neural Inf Process Syst (NeurIPS)"},{"key":"ref36","first-page":"191","article-title":"Appearance-based eye gaze estimation","author":"tan","year":"2002","journal-title":"Proc IEEE Workshop Appl Comput Vis 
(WACV)"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.3758\/BF03195475"},{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.1109\/TMM.2016.2576284"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1109\/TBME.2005.863952"},{"key":"ref27","doi-asserted-by":"crossref","first-page":"4","DOI":"10.1016\/j.cviu.2004.07.010","article-title":"Eye gaze tracking techniques for interactive applications","volume":"98","author":"morimoto","year":"2005","journal-title":"Comput Vis Image Understand"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1109\/TBME.2007.895750"},{"key":"ref2","article-title":"Revisiting video saliency prediction in the deep learning era","author":"wang","year":"2019","journal-title":"IEEE Trans Pattern Anal Mach Intell"},{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00875"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1109\/CVPRW.2018.00290"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1007\/s00138-017-0852-4"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.239"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2017.341"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1109\/CVPRW.2017.284"},{"key":"ref26","first-page":"100","article-title":"Appearance-based gaze estimation via evaluation-guided asymmetric regression","author":"cheng","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1007\/s11263-008-0152-6"},{"key":"ref50","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2012.101"},{"key":"ref51","first-page":"1097","article-title":"ImageNet classification with deep convolutional neural networks","author":"alex","year":"2012","journal-title":"Proc Annu Conf Neural Inf Process Syst (NeurIPS)"},{"key":"ref59","first-page":"309","article-title":"Appearance-based gaze estimation using dilated-convolutions","author":"chen","year":"2018","journal-title":"Proc Asian Conf Comput Vis (ACCV)"},{"key":"ref58","first-page":"7743","article-title":"Mixed effects neural networks (MeNets) with applications to gaze estimation","author":"xiong","year":"2019","journal-title":"Proc IEEE\/CVF Conf Comput Vis Pattern Recognit (CVPR)"},{"key":"ref57","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2019.00946"},{"key":"ref56","first-page":"334","article-title":"RT-GENE: Real-time eye gaze estimation in natural environments","author":"fischer","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref55","doi-asserted-by":"crossref","first-page":"255","DOI":"10.1145\/2578153.2578190","article-title":"EYEDIAP: A database for the development and evaluation of gaze estimation algorithms from RGB and RGB-D cameras","author":"mora","year":"2014","journal-title":"Proceedings of the Eye Tracking Research & Application Symposium (ETRA)"},{"key":"ref54","first-page":"1","article-title":"Adam: A method for stochastic optimization","author":"kingma","year":"2015","journal-title":"Proc Int Conf Learn Represent (ICLR)"},{"key":"ref53","author":"abadi","year":"2015","journal-title":"TensorFlow Large-Scale Machine Learning on Heterogeneous Systems"},{"key":"ref52","first-page":"448","article-title":"Batch normalization: Accelerating deep network training by reducing internal covariate shift","author":"ioffe","year":"2015","journal-title":"Proc Int Conf Mach Learn (ICML)"},{"key":"ref10","article-title":"Inferring salient objects from human fixations","author":"wang","year":"2019","journal-title":"IEEE 
Trans Pattern Anal Mach Intell"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2017.2787612"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2017.2657880"},{"key":"ref12","article-title":"A fixation-based 360&#x00B0; benchmark dataset for salient object detection","author":"zhang","year":"2020","journal-title":"arXiv 2001 07960"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00559"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2013.2279941"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1145\/3343031.3350896"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2009.30"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2014.235"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/ICPR.2014.210"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2015.7299081"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1145\/2984511.2984536"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1145\/3126594.3126614"},{"key":"ref6","first-page":"186","article-title":"Salient objects in clutter: Bringing salient object detection to the foreground","author":"fan","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2014.2337758"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2017.2754941"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2017.2662005"},{"key":"ref49","first-page":"383","article-title":"Connecting gaze, scene, and attention: Generalized attention estimation via joint modeling of gaze and scene saliency","author":"chong","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2016.2601784"},{"key":"ref46","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2015.2445295"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2017.2778103"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2019.00582"},{"key":"ref47","first-page":"1","article-title":"A coarse-to-fine adaptive network for appearance-based gaze estimation","author":"cheng","year":"2020","journal-title":"Proc AAAI Conf Artif Intell (AAAI)"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2019.2898567"},{"key":"ref41","first-page":"230","article-title":"Sparse and semi-supervised visual mapping with the \n$\\text{S}^{\\wedge}~3$\nGP","author":"williams","year":"2006","journal-title":"Proc IEEE Conf Comput Vis Pattern Recognit (CVPR)"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1145\/2857491.2857492"},{"key":"ref43","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2018.2840724"}],"container-title":["IEEE Transactions on Image 
Processing"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/83\/8835130\/09050633.pdf?arnumber=9050633","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,4,27]],"date-time":"2022-04-27T14:38:41Z","timestamp":1651070321000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9050633\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2020]]},"references-count":59,"URL":"https:\/\/doi.org\/10.1109\/tip.2020.2982828","relation":{},"ISSN":["1057-7149","1941-0042"],"issn-type":[{"value":"1057-7149","type":"print"},{"value":"1941-0042","type":"electronic"}],"subject":[],"published":{"date-parts":[[2020]]}}}