{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,30]],"date-time":"2026-04-30T20:40:01Z","timestamp":1777581601899,"version":"3.51.4"},"reference-count":124,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2023,1,1]],"date-time":"2023-01-01T00:00:00Z","timestamp":1672531200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by-nc-nd\/4.0\/"}],"funder":[{"DOI":"10.13039\/501100021171","name":"Basic and Applied Basic Research Foundation of Guangdong Province","doi-asserted-by":"publisher","award":["2022A1515140066"],"award-info":[{"award-number":["2022A1515140066"]}],"id":[{"id":"10.13039\/501100021171","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Henan Provincial Key Scientific and Technological Project","award":["222102220011"],"award-info":[{"award-number":["222102220011"]}]},{"name":"Guangdong Provincial Key Laboratory of Manufacturing Equipment Digitization","award":["2023B1212060012"],"award-info":[{"award-number":["2023B1212060012"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Access"],"published-print":{"date-parts":[[2023]]},"DOI":"10.1109\/access.2023.3339561","type":"journal-article","created":{"date-parts":[[2023,12,5]],"date-time":"2023-12-05T18:25:42Z","timestamp":1701800742000},"page":"139017-139038","source":"Crossref","is-referenced-by-count":111,"title":["A Comprehensive Review of Deep Learning-Based PCB Defect Detection"],"prefix":"10.1109","volume":"11","author":[{"given":"Xing","family":"Chen","sequence":"first","affiliation":[{"name":"School of Artificial Intelligence and Software Engineering, Nanyang Normal University, Nanyang, China"}]},{"given":"Yonglei","family":"Wu","sequence":"additional","affiliation":[{"name":"Henan Key Laboratory of Intelligent Manufacturing of Mechanical Equipment, Zhengzhou University of Light Industry, 
Zhengzhou, China"}]},{"given":"Xingyou","family":"He","sequence":"additional","affiliation":[{"name":"Henan Key Laboratory of Intelligent Manufacturing of Mechanical Equipment, Zhengzhou University of Light Industry, Zhengzhou, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0003-4336-179X","authenticated-orcid":false,"given":"Wuyi","family":"Ming","sequence":"additional","affiliation":[{"name":"Guangdong Provincial Key Laboratory of Digital Manufacturing Equipment, Guangdong HUST Industrial Technology Research Institute, Dongguan, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/access.2023.3245093"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1117\/12.920531"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.3390\/s23052766"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/icaica52286.2021.9498174"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2023.120029"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1016\/j.resconrec.2021.105963"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2012.02.100"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1049\/trit.2019.0019"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/cscwd49262.2021.9437846"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1109\/icimtech.2017.8273538"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/icicct.2018.8473285"},{"key":"ref12","doi-asserted-by":"publisher","DOI":"10.1006\/cviu.1996.0020"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.3389\/fpls.2020.00510"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1016\/j.measurement.2021.109973"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1016\/j.measurement.2020.107722"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1016\/j.measurement.2019.04.087"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1109\/access.2021.3116131"},{"key":"ref18","doi-assert
ed-by":"publisher","DOI":"10.1109\/access.2023.3257045"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1109\/access.2022.3161575"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1016\/j.compag.2018.02.016"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1111\/jfpe.13974"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.3390\/s19051058"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.3390\/s21144749"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/mgrs.2016.2540798"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.3389\/fnbot.2022.1074862"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.3390\/rs14174208"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.1016\/j.ijhydene.2022.10.261"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1016\/j.est.2023.107868"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1016\/j.neunet.2014.09.003"},{"key":"ref30","doi-asserted-by":"publisher","DOI":"10.1038\/nature14539"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.1109\/5.726791"},{"key":"ref32","article-title":"Unsupervised representation learning with deep convolutional generative adversarial networks","author":"Radford","year":"2015","journal-title":"arXiv:1511.06434"},{"key":"ref33","first-page":"1","article-title":"ImageNet classification with deep convolutional neural networks","volume-title":"Adv. Neural Inf. Process. Syst","author":"Krizhevsky","year":"2012"},{"key":"ref34","first-page":"315","article-title":"Deep sparse rectifier neural networks","volume-title":"Proc. 
IEEE IWAENC","volume":"15","author":"Glorot"},{"key":"ref35","doi-asserted-by":"publisher","DOI":"10.1038\/323533a0"},{"key":"ref36","article-title":"Very deep convolutional networks for large-scale image recognition","author":"Simonyan","year":"2014","journal-title":"arXiv:1409.1556"},{"key":"ref37","article-title":"Going deeper with convolutions","author":"Szegedy","year":"2014","journal-title":"arXiv:1409.4842"},{"key":"ref38","article-title":"Deep residual learning for image recognition","author":"He","year":"2015","journal-title":"arXiv:1512.03385"},{"key":"ref39","doi-asserted-by":"publisher","DOI":"10.1108\/ssmt-03-2016-0005"},{"key":"ref40","article-title":"Recurrent models of visual attention","author":"Mnih","year":"2014","journal-title":"arXiv:1406.6247"},{"key":"ref41","article-title":"Neural machine translation by jointly learning to align and translate","author":"Bahdanau","year":"2014","journal-title":"arXiv:1409.0473"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D15-1166"},{"key":"ref43","article-title":"Attention is all you need","author":"Vaswani","year":"2017","journal-title":"arXiv:1706.03762"},{"key":"ref44","article-title":"Squeeze-and-excitation networks","author":"Hu","year":"2017","journal-title":"arXiv:1709.01507"},{"key":"ref45","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-01234-2_1"},{"key":"ref46","article-title":"ECA-Net: Efficient channel attention for deep convolutional neural networks","author":"Wang","year":"2019","journal-title":"arXiv:1910.03151"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR46437.2021.01350"},{"key":"ref48","article-title":"BERT: Pre-training of deep bidirectional transformers for language understanding","author":"Devlin","year":"2018","journal-title":"arXiv:1810.04805"},{"key":"ref49","article-title":"Training data-efficient image transformers & distillation through 
attention","author":"Touvron","year":"2020","journal-title":"arXiv:2012.12877"},{"key":"ref50","article-title":"An image is worth 16\u00d716 words: Transformers for image recognition at scale","author":"Dosovitskiy","year":"2020","journal-title":"arXiv:2010.11929"},{"key":"ref51","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/N18-2074"},{"key":"ref52","first-page":"1","article-title":"Stand-alone self-attention in vision models","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","author":"Ramachandran"},{"key":"ref53","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-030-58452-8_13"},{"key":"ref54","first-page":"1","article-title":"Deformable DETR: Deformable transformers for end-to-end object detection","volume-title":"Proc. Int. Conf. Learn. Represent.","author":"Zhu"},{"key":"ref55","article-title":"Swin transformer v2: Scaling up capacity and resolution","author":"Liu","year":"2021","journal-title":"arXiv:2111.09883"},{"key":"ref56","doi-asserted-by":"publisher","DOI":"10.1109\/ICCV.2019.00338"},{"key":"ref57","doi-asserted-by":"publisher","DOI":"10.1007\/s11263-019-01247-4"},{"key":"ref58","doi-asserted-by":"publisher","DOI":"10.1109\/tnnls.2018.2876865"},{"key":"ref59","doi-asserted-by":"publisher","DOI":"10.1109\/cvpr.2014.81"},{"key":"ref60","doi-asserted-by":"publisher","DOI":"10.1109\/iccv.2015.169"},{"key":"ref61","doi-asserted-by":"publisher","DOI":"10.1109\/tpami.2016.2577031"},{"key":"ref62","doi-asserted-by":"publisher","DOI":"10.1109\/iccv.2017.322"},{"key":"ref63","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-319-46448-0_2"},{"key":"ref64","doi-asserted-by":"publisher","DOI":"10.1109\/cvpr.2016.91"},{"key":"ref65","doi-asserted-by":"publisher","DOI":"10.1109\/cvpr.2017.690"},{"key":"ref66","article-title":"YOLOv3: An incremental improvement","author":"Redmon","year":"2018","journal-title":"arXiv:1804.02767"},{"key":"ref67","article-title":"YOLOv4: Optimal speed and accuracy of object 
detection","author":"Bochkovskiy","year":"2020","journal-title":"arXiv:2004.10934"},{"key":"ref68","article-title":"YOLOv6: A single-stage object detection framework for industrial applications","author":"Li","year":"2022","journal-title":"arXiv:2209.02976"},{"key":"ref69","article-title":"YOLOv7: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors","author":"Wang","year":"2022","journal-title":"arXiv:2207.02696"},{"key":"ref70","doi-asserted-by":"publisher","DOI":"10.1109\/jproc.2023.3238524"},{"key":"ref71","doi-asserted-by":"publisher","DOI":"10.1088\/1742-6596\/1827\/1\/012167"},{"key":"ref72","article-title":"Mish: A self regularized non-monotonic activation function","author":"Misra","year":"2019","journal-title":"arXiv:1908.08681"},{"key":"ref73","doi-asserted-by":"publisher","DOI":"10.3389\/fphy.2021.708097"},{"key":"ref74","doi-asserted-by":"publisher","DOI":"10.1109\/tpami.2018.2858826"},{"key":"ref75","doi-asserted-by":"publisher","DOI":"10.1109\/cvpr.2017.106"},{"issue":"6","key":"ref76","first-page":"319","article-title":"Multi-target detection of PCB defects based on improved SSD","volume":"8","author":"Jiang","year":"2022","journal-title":"Int. Core J. Eng."},{"key":"ref77","doi-asserted-by":"publisher","DOI":"10.1155\/2022\/7536711"},{"key":"ref78","first-page":"1","article-title":"Understanding the effective receptive field in deep convolutional neural networks","volume-title":"Proc. Adv. Neural Inf. Process. 
Syst.","author":"Luo"},{"key":"ref79","doi-asserted-by":"publisher","DOI":"10.1109\/tpami.2017.2699184"},{"key":"ref80","article-title":"Rethinking atrous convolution for semantic image segmentation","author":"Chen","year":"2017","journal-title":"arXiv:1706.05587"},{"key":"ref81","doi-asserted-by":"publisher","DOI":"10.3390\/s22207971"},{"key":"ref82","doi-asserted-by":"publisher","DOI":"10.1109\/access.2022.3198994"},{"key":"ref83","doi-asserted-by":"publisher","DOI":"10.1109\/jsen.2022.3208580"},{"key":"ref84","doi-asserted-by":"publisher","DOI":"10.1117\/12.2652341"},{"key":"ref85","article-title":"Learning spatial fusion for single-shot object detection","author":"Liu","year":"2019","journal-title":"arXiv:1911.09516"},{"key":"ref86","article-title":"Global attention mechanism: Retain information to enhance channel-spatial interactions","author":"Liu","year":"2021","journal-title":"arXiv:2112.05561"},{"key":"ref87","doi-asserted-by":"publisher","DOI":"10.1109\/access.2022.3214306"},{"key":"ref88","doi-asserted-by":"publisher","DOI":"10.1016\/j.rineng.2023.100968"},{"key":"ref89","doi-asserted-by":"publisher","DOI":"10.1007\/s10489-022-03633-x"},{"key":"ref90","doi-asserted-by":"publisher","DOI":"10.1109\/icce-china.2018.8448674"},{"key":"ref91","doi-asserted-by":"publisher","DOI":"10.1109\/cvpr.2016.89"},{"key":"ref92","doi-asserted-by":"publisher","DOI":"10.1109\/access.2020.3001349"},{"key":"ref93","doi-asserted-by":"publisher","DOI":"10.1109\/cvpr.2019.00308"},{"key":"ref94","doi-asserted-by":"publisher","DOI":"10.1145\/3445815.3445853"},{"key":"ref95","doi-asserted-by":"publisher","DOI":"10.1109\/access.2022.3168861"},{"key":"ref96","doi-asserted-by":"publisher","DOI":"10.1109\/access.2022.3228206"},{"key":"ref97","doi-asserted-by":"publisher","DOI":"10.3390\/electronics12092120"},{"key":"ref98","article-title":"CrossFormer: A versatile vision transformer hinging on cross-scale 
attention","author":"Wang","year":"2021","journal-title":"arXiv:2108.00154"},{"key":"ref99","article-title":"CrossFormer++: A versatile vision transformer hinging on cross-scale attention","author":"Wang","year":"2023","journal-title":"arXiv:2303.06908"},{"key":"ref100","article-title":"Vision transformer adapter for dense predictions","author":"Chen","year":"2022","journal-title":"arXiv:2205.08534"},{"key":"ref101","doi-asserted-by":"publisher","DOI":"10.1007\/s11263-009-0275-4"},{"key":"ref102","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2019.00075"},{"key":"ref103","doi-asserted-by":"publisher","DOI":"10.1109\/tpami.2011.155"},{"key":"ref104","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-319-10602-1_48"},{"key":"ref105","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v34i07.6999"},{"key":"ref106","article-title":"Focal and efficient IOU loss for accurate bounding box regression","author":"Zhang","year":"2021","journal-title":"arXiv:2101.08158"},{"key":"ref107","article-title":"Alpha-IoU: A family of power intersection over union losses for bounding box regression","author":"He","year":"2021","journal-title":"arXiv:2110.13675"},{"key":"ref108","article-title":"SIoU loss: More powerful learning for bounding box regression","author":"Gevorgyan","year":"2022","journal-title":"arXiv:2205.12740"},{"key":"ref109","doi-asserted-by":"publisher","DOI":"10.1109\/access.2023.3233964"},{"key":"ref110","doi-asserted-by":"publisher","DOI":"10.1016\/j.isprsjprs.2016.03.014"},{"key":"ref111","doi-asserted-by":"publisher","DOI":"10.1109\/tgrs.2019.2899955"},{"key":"ref112","doi-asserted-by":"publisher","DOI":"10.1109\/tgrs.2023.3258666"},{"key":"ref113","doi-asserted-by":"publisher","DOI":"10.1016\/j.isprsjprs.2020.01.025"},{"key":"ref114","doi-asserted-by":"publisher","DOI":"10.1109\/cvprw.2018.00101"},{"key":"ref115","article-title":"Online PCB defect detector on a new PCB defect 
dataset","author":"Tang","year":"2019","journal-title":"arXiv:1902.06197"},{"key":"ref116","doi-asserted-by":"publisher","DOI":"10.1049\/joe.2019.1183"},{"key":"ref117","doi-asserted-by":"publisher","DOI":"10.1007\/s10836-022-06026-7"},{"key":"ref118","doi-asserted-by":"publisher","DOI":"10.3390\/s23218780"},{"key":"ref119","doi-asserted-by":"publisher","DOI":"10.1016\/j.jmapro.2021.10.035"},{"key":"ref120","doi-asserted-by":"publisher","DOI":"10.1016\/j.ijheatmasstransfer.2022.122563"},{"key":"ref121","doi-asserted-by":"publisher","DOI":"10.1016\/j.optlastec.2022.108760"},{"key":"ref122","doi-asserted-by":"publisher","DOI":"10.1016\/j.compositesb.2023.110827"},{"key":"ref123","doi-asserted-by":"publisher","DOI":"10.3390\/met13050839"},{"key":"ref124","doi-asserted-by":"publisher","DOI":"10.1016\/j.ijmachtools.2014.01.004"}],"container-title":["IEEE Access"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/6287639\/10005208\/10343144.pdf?arnumber=10343144","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,2,6]],"date-time":"2024-02-06T19:15:48Z","timestamp":1707246948000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10343144\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2023]]},"references-count":124,"URL":"https:\/\/doi.org\/10.1109\/access.2023.3339561","relation":{},"ISSN":["2169-3536"],"issn-type":[{"value":"2169-3536","type":"electronic"}],"subject":[],"published":{"date-parts":[[2023]]}}}