{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,7]],"date-time":"2026-04-07T16:50:15Z","timestamp":1775580615912,"version":"3.50.1"},"reference-count":53,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"12","license":[{"start":{"date-parts":[[2024,12,1]],"date-time":"2024-12-01T00:00:00Z","timestamp":1733011200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2024,12,1]],"date-time":"2024-12-01T00:00:00Z","timestamp":1733011200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,12,1]],"date-time":"2024-12-01T00:00:00Z","timestamp":1733011200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100001809","name":"Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62201233"],"award-info":[{"award-number":["62201233"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/100022957","name":"Double Thousand Plan of Jiangxi Province","doi-asserted-by":"publisher","award":["jxsq2023201118"],"award-info":[{"award-number":["jxsq2023201118"]}],"id":[{"id":"10.13039\/100022957","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Outstanding Youth Fund Program of Jiangxi Province","award":["20232ACB212004"],"award-info":[{"award-number":["20232ACB212004"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Trans. Circuits Syst. Video Technol."],"published-print":{"date-parts":[[2024,12]]},"DOI":"10.1109\/tcsvt.2024.3448351","type":"journal-article","created":{"date-parts":[[2024,8,23]],"date-time":"2024-08-23T18:04:17Z","timestamp":1724436257000},"page":"13401-13412","source":"Crossref","is-referenced-by-count":6,"title":["Dual Protection for Image Privacy and Copyright via Traceable Adversarial Examples"],"prefix":"10.1109","volume":"34","author":[{"ORCID":"https:\/\/orcid.org\/0000-0003-3385-8364","authenticated-orcid":false,"given":"Ming","family":"Li","sequence":"first","affiliation":[{"name":"College of Computer and Information Engineering, Henan Normal University, Xinxiang, China"}]},{"given":"Zhaoli","family":"Yang","sequence":"additional","affiliation":[{"name":"College of Computer and Information Engineering, Henan Normal University, Xinxiang, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-5532-3999","authenticated-orcid":false,"given":"Tao","family":"Wang","sequence":"additional","affiliation":[{"name":"College of Computer Science and Technology, Nanjing University of Aeronautics and Astronautics, Nanjing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-8183-8435","authenticated-orcid":false,"given":"Yushu","family":"Zhang","sequence":"additional","affiliation":[{"name":"College of Computer Science and Technology, Nanjing University of Aeronautics and Astronautics, Nanjing, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-3098-4640","authenticated-orcid":false,"given":"Wenying","family":"Wen","sequence":"additional","affiliation":[{"name":"School of Computer and Artificial Intelligence, Jiangxi University of Finance and Economics, Nanchang, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/CVPRW.2018.00207"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR52688.2022.00749"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/TCSVT.2021.3065199"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/TCSVT.2019.2915116"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1109\/TCSVT.2022.3210010"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1109\/TCSVT.2022.3207008"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2022\/107"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/ICME52920.2022.9859600"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV51070.2023.00402"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1109\/TIFS.2024.3397043"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/TCSVT.2021.3055072"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1109\/TCSVT.2020.3030671"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1109\/IJCNN52387.2021.9534119"},{"key":"ref14","article-title":"Attacking optical character recognition (OCR) systems with adversarial watermarks","author":"Chen","year":"2020","journal-title":"arXiv:2002.03095"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1109\/ICCECE54139.2022.9712846"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1016\/j.jisa.2023.103662"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.23919\/EUSIPCO.2018.8553343"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1145\/3394171.3413976"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1109\/TC.2021.3065172"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP.2019.8682351"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1016\/j.asoc.2023.110777"},{"key":"ref22","article-title":"Intriguing properties of neural networks","author":"Szegedy","year":"2013","journal-title":"arXiv:1312.6199"},{"key":"ref23","article-title":"Explaining and harnessing adversarial examples","author":"Goodfellow","year":"2014","journal-title":"arXiv:1412.6572"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/SP.2017.49"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.282"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.1109\/TEVC.2019.2890858"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2018\/543"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1007\/s10462-021-10125-w"},{"key":"ref29","first-page":"4584","article-title":"Towards robust detection of adversarial examples","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"31","author":"Pang"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v32i1.11828"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV48922.2021.00778"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v35i3.16371"},{"key":"ref33","first-page":"27338","article-title":"Improving adversarial robustness via mutual information estimation","volume-title":"Proc. Int. Conf. Mach. Learn. (ICML)","author":"Zhou"},{"key":"ref34","doi-asserted-by":"publisher","DOI":"10.1109\/TNNLS.2022.3183095"},{"key":"ref35","first-page":"25595","article-title":"Understanding robust overfitting of adversarial training and beyond","volume-title":"Proc. Int. Conf. Mach. Learn. (ICML)","author":"Yu"},{"key":"ref36","first-page":"12062","article-title":"Adversarial purification with score-based generative models","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Yoon"},{"key":"ref37","doi-asserted-by":"publisher","DOI":"10.1016\/j.patcog.2021.108249"},{"key":"ref38","first-page":"12835","article-title":"Towards defending against adversarial examples via attack-invariant features","volume-title":"Proc. 38th Int. Conf. Mach. Learn.","author":"Zhou"},{"key":"ref39","first-page":"42517","article-title":"Eliminating adversarial noise via information discard and robust representation restoration","volume-title":"Proc. Int. Conf. Mach. Learn. (ICML)","author":"Zhou"},{"key":"ref40","article-title":"Feature squeezing: Detecting adversarial examples in deep neural networks","author":"Xu","year":"2017","journal-title":"arXiv:1704.01155"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00095"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00624"},{"key":"ref43","article-title":"A robust blind watermarking using convolutional neural network","author":"Mun","year":"2017","journal-title":"arXiv:1704.03248"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2019.113157"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.1109\/TCSVT.2020.2998476"},{"key":"ref46","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-01267-0_40"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2017.74"},{"key":"ref48","article-title":"Very deep convolutional networks for large-scale image recognition","author":"Simonyan","year":"2014","journal-title":"arXiv:1409.1556"},{"key":"ref49","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.90"},{"key":"ref50","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.308"},{"key":"ref51","article-title":"SqueezeNet: AlexNet-level accuracy with 50\u00d7 fewer parameters and <0.5 MB model size","author":"Iandola","year":"2016","journal-title":"arXiv:1602.07360"},{"key":"ref52","first-page":"1802","article-title":"Exploring the landscape of spatial robustness","volume-title":"Proc. Int. Conf. Mach. Learn.","author":"Engstrom"},{"key":"ref53","article-title":"Decision-based adversarial attacks: Reliable attacks against black-box machine learning models","author":"Brendel","year":"2017","journal-title":"arXiv:1712.04248"}],"container-title":["IEEE Transactions on Circuits and Systems for Video Technology"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/76\/10811783\/10644094.pdf?arnumber=10644094","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,1,15]],"date-time":"2025-01-15T20:22:35Z","timestamp":1736972555000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10644094\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,12]]},"references-count":53,"journal-issue":{"issue":"12"},"URL":"https:\/\/doi.org\/10.1109\/tcsvt.2024.3448351","relation":{},"ISSN":["1051-8215","1558-2205"],"issn-type":[{"value":"1051-8215","type":"print"},{"value":"1558-2205","type":"electronic"}],"subject":[],"published":{"date-parts":[[2024,12]]}}}