{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,11]],"date-time":"2026-03-11T06:34:13Z","timestamp":1773210853206,"version":"3.50.1"},"reference-count":31,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2020,1,1]],"date-time":"2020-01-01T00:00:00Z","timestamp":1577836800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/legalcode"}],"funder":[{"name":"Humanities and Social Sciences Fund of the Ministry of Education","award":["19YJC760150"],"award-info":[{"award-number":["19YJC760150"]}]},{"name":"Beijing Social Science Foundation","award":["18YTC038"],"award-info":[{"award-number":["18YTC038"]}]},{"DOI":"10.13039\/501100004826","name":"Beijing Natural Science Foundation","doi-asserted-by":"publisher","award":["4182018"],"award-info":[{"award-number":["4182018"]}],"id":[{"id":"10.13039\/501100004826","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100004826","name":"Beijing Natural Science Foundation","doi-asserted-by":"publisher","award":["4154067"],"award-info":[{"award-number":["4154067"]}],"id":[{"id":"10.13039\/501100004826","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100004826","name":"Beijing Natural Science Foundation","doi-asserted-by":"publisher","award":["4194076"],"award-info":[{"award-number":["4194076"]}],"id":[{"id":"10.13039\/501100004826","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation","doi-asserted-by":"publisher","award":["61402016"],"award-info":[{"award-number":["61402016"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100011160","name":"open funding project of State Key Laboratory of Virtual Reality Technology and Systems, Beihang University","doi-asserted-by":"publisher","award":["VRLAB2020B10"],"award-info":[{"award-number":["VRLAB2020B10"]}],"id":[{"id":"10.13039\/501100011160","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Beijing Youth Talent Foundation","award":["2016000026833ZK09"],"award-info":[{"award-number":["2016000026833ZK09"]}]},{"name":"NCUT Foundation","award":["XN018001"],"award-info":[{"award-number":["XN018001"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Access"],"published-print":{"date-parts":[[2020]]},"DOI":"10.1109\/access.2020.3009470","type":"journal-article","created":{"date-parts":[[2020,7,16]],"date-time":"2020-07-16T20:04:45Z","timestamp":1594929885000},"page":"132002-132011","source":"Crossref","is-referenced-by-count":26,"title":["Detail-Preserving CycleGAN-AdaIN Framework for Image-to-Ink Painting Translation"],"prefix":"10.1109","volume":"8","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-3329-539X","authenticated-orcid":false,"given":"Fengquan","family":"Zhang","sequence":"first","affiliation":[]},{"given":"Huaming","family":"Gao","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0003-2478-0024","authenticated-orcid":false,"given":"Yuping","family":"Lai","sequence":"additional","affiliation":[]}],"member":"263","reference":[{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/TIP.2003.819861"},{"key":"ref30","first-page":"723","article-title":"A kernel two-sample test","volume":"13","author":"gretton","year":"2012","journal-title":"J Mach Learn Res"},{"key":"ref10","article-title":"Least squares generative adversarial networks","author":"mao","year":"2016","journal-title":"arXiv 1611 04076"},{"key":"ref11","article-title":"Towards principled methods for training generative adversarial networks","author":"arjovsky","year":"2017","journal-title":"arXiv 1701 04862"},{"key":"ref12","first-page":"214","article-title":"Wasserstein generative adversarial networks","author":"arjovsky","year":"2017","journal-title":"Proc Int Conf Mach Learn (ICML)"},{"key":"ref13","article-title":"Improved training of Wasserstein GANs","author":"gulrajani","year":"2017","journal-title":"arXiv 1704 00028"},{"key":"ref14","article-title":"Unsupervised representation learning with deep convolutional generative adversarial networks","author":"radford","year":"2015","journal-title":"arXiv 1511 06434"},{"key":"ref15","article-title":"Conditional generative adversarial nets","author":"mirza","year":"2014","journal-title":"arXiv 1411 1784"},{"key":"ref16","article-title":"Deep generative image models using a Laplacian pyramid of adversarial networks","author":"denton","year":"2015","journal-title":"arXiv 1506 05751"},{"key":"ref17","article-title":"Progressive growing of GANs for improved quality, stability, and variation","author":"karras","year":"2017","journal-title":"arXiv 1710 10196"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00453"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2018.00916"},{"key":"ref28","first-page":"752","article-title":"One-sided unsupervised domain mapping","author":"benaim","year":"2017","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1088\/1742-6596\/1004\/1\/012026"},{"key":"ref27","article-title":"A neural algorithm of artistic style","author":"gatys","year":"2015","journal-title":"arXiv 1508 06576"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/ACSSC.2003.1292216"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-00764-5_3"},{"key":"ref29","first-page":"6626","article-title":"GANs trained by a two time-scale update rule converge to a local Nash equilibrium","author":"heusel","year":"2017","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.12792\/JIIAE.5.65"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/ICEIEC.2019.8784632"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1145\/3240508.3240655"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2017.167"},{"key":"ref9","first-page":"2672","article-title":"Generative adversarial nets","author":"goodfellow","year":"2014","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2017.244"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-01246-5_3"},{"key":"ref22","first-page":"2172","article-title":"InfoGAN: Interpretable representation learning by information maximizing generative adversarial nets","author":"chen","year":"2016","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-01219-9_11"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2017.632"},{"key":"ref23","article-title":"Adversarial feature learning","author":"donahue","year":"2016","journal-title":"arXiv 1605 09782"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2019.00427"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00252"}],"container-title":["IEEE Access"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/6287639\/8948470\/09142197.pdf?arnumber=9142197","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,1,12]],"date-time":"2022-01-12T01:08:43Z","timestamp":1641949723000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9142197\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2020]]},"references-count":31,"URL":"https:\/\/doi.org\/10.1109\/access.2020.3009470","relation":{},"ISSN":["2169-3536"],"issn-type":[{"value":"2169-3536","type":"electronic"}],"subject":[],"published":{"date-parts":[[2020]]}}}