{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,12,4]],"date-time":"2025-12-04T06:13:48Z","timestamp":1764828828884,"version":"3.37.3"},"reference-count":48,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2020,1,1]],"date-time":"2020-01-01T00:00:00Z","timestamp":1577836800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/legalcode"}],"funder":[{"name":"Institute of Information and Communications Technology Planning and Evaluation (IITP) Grant funded by the Korean Government (MSIT)","award":["2019-0-00533","2019-0-01343"],"award-info":[{"award-number":["2019-0-00533","2019-0-01343"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Access"],"published-print":{"date-parts":[[2020]]},"DOI":"10.1109\/access.2020.3040849","type":"journal-article","created":{"date-parts":[[2020,11,26]],"date-time":"2020-11-26T20:56:11Z","timestamp":1606424171000},"page":"215365-215374","source":"Crossref","is-referenced-by-count":16,"title":["Speculative Backpropagation for CNN Parallel Training"],"prefix":"10.1109","volume":"8","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-5831-2176","authenticated-orcid":false,"given":"Sangwoo","family":"Park","sequence":"first","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0002-6377-5482","authenticated-orcid":false,"given":"Taeweon","family":"Suh","sequence":"additional","affiliation":[]}],"member":"263","reference":[{"key":"ref39","article-title":"Network in network","author":"lin","year":"2013","journal-title":"arXiv 1312 4400"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1109\/TPAMI.2016.2644615"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.1145\/3218603.3218615"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.1109\/ICFPT47387.2019.00009"},{"key":"ref31","article-title":"DoReFa-net: Training 
low bitwidth convolutional neural networks with low bitwidth gradients","author":"zhou","year":"2016","journal-title":"arXiv 1606 06160 [cs]"},{"key":"ref30","article-title":"Binarized neural networks: Training deep neural networks with weights and activations constrained to +1 or \u22121","author":"courbariaux","year":"2016","journal-title":"arXiv 1602 02830 [cs]"},{"key":"ref37","first-page":"1097","article-title":"Imagenet classification with deep convolutional neural networks","author":"krizhevsky","year":"2012","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref36","article-title":"Decoupled parallel backpropagation with convergence guarantee","author":"huo","year":"2018","journal-title":"arXiv 1804 10574"},{"key":"ref35","first-page":"1037","article-title":"Direct feedback alignment provides learning in deep neural networks","author":"n\u00f8kland","year":"2016","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref34","first-page":"1627","article-title":"Decoupled neural interfaces using synthetic gradients","volume":"70","author":"jaderberg","year":"2017","journal-title":"Proc 34th Int Conf Mach Learn"},{"key":"ref10","article-title":"Very deep convolutional networks for large-scale image recognition","author":"simonyan","year":"2014","journal-title":"arXiv 1409 1556"},{"journal-title":"MNIST Handwritten Digit Database","year":"2010","author":"lecun","key":"ref40"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2015.7298594"},{"key":"ref12","first-page":"215","article-title":"An analysis of single-layer networks in unsupervised feature learning","author":"coates","year":"2011","journal-title":"Proc 14th Int Conf Artif Intell Statist"},{"key":"ref13","first-page":"2834","article-title":"On model parallelization and scheduling strategies for distributed machine learning","author":"lee","year":"2014","journal-title":"Proc Adv Neural Inf Process 
Syst"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1145\/2901318.2901331"},{"key":"ref15","first-page":"265","article-title":"Tensorflow: A system for large-scale machine learning","author":"abadi","year":"2016","journal-title":"Proc USENIX Symp Operat Syst Des Implement"},{"key":"ref16","first-page":"199","article-title":"Accelerated Gaussian convolution in a data assimilation scenario","author":"de luca","year":"2020","journal-title":"Proc Int Conf Comput Sci"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1002\/qj.2997"},{"key":"ref18","first-page":"2595","article-title":"Parallelized stochastic gradient descent","author":"zinkevich","year":"2010","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref19","first-page":"693","article-title":"Hogwild: A lock-free approach to parallelizing stochastic gradient descent","author":"recht","year":"2011","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref28","doi-asserted-by":"publisher","DOI":"10.1007\/978-1-4614-1791-0"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1007\/s10462-009-9124-7"},{"key":"ref27","article-title":"AMPNet: Asynchronous model-parallel training for dynamic neural networks","author":"gaunt","year":"2017","journal-title":"arXiv 1705 09786"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/TASL.2011.2134090"},{"key":"ref6","article-title":"Multi-column deep neural networks for image classification","author":"cire\u015fan","year":"2012","journal-title":"arXiv 1202 2745"},{"key":"ref29","doi-asserted-by":"publisher","DOI":"10.1145\/2678373.2665678"},{"key":"ref5","first-page":"10","article-title":"A new look at the system, algorithm and theory foundations of large-scale distributed machine learning","author":"xing","year":"2015","journal-title":"Proc Int Conf Knowl Discovery Data Mining"},{"key":"ref8","first-page":"265","article-title":"On optimization methods for deep learning","author":"le","year":"2011","journal-title":"Proc Int Conf Int Conf 
Mach Learn"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2016.90"},{"key":"ref2","article-title":"Deep big simple neural nets excel on handwritten digit recognition","author":"claudiu ciresan","year":"2010","journal-title":"arXiv 1003 0358"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/5.726791"},{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1145\/1390156.1390177"},{"key":"ref46","doi-asserted-by":"publisher","DOI":"10.1145\/2541940.2541967"},{"key":"ref20","article-title":"Ai benchmark: Running deep neural networks on Android smartphones","author":"ignatov","year":"2018","journal-title":"Proc Eur Conf Comput Vis (ECCV)"},{"key":"ref45","first-page":"5109","article-title":"Regularizing deep neural networks by noise: Its interpretation and optimization","author":"noh","year":"2017","journal-title":"Proc Adv Neural Inf Process"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR.2009.5206848"},{"key":"ref22","first-page":"1223","article-title":"Large scale distributed deep networks","author":"dean","year":"2012","journal-title":"Proc Adv Neural Inf Process Syst"},{"key":"ref47","doi-asserted-by":"publisher","DOI":"10.1145\/2749469.2750389"},{"journal-title":"Startup Spins Whole Wafer for AI","year":"2019","author":"merritt","key":"ref21"},{"key":"ref42","article-title":"Adam: A method for stochastic optimization","author":"kingma","year":"2014","journal-title":"arXiv 1412 6980"},{"key":"ref24","article-title":"One weird trick for parallelizing convolutional neural networks","author":"krizhevsky","year":"2014","journal-title":"arXiv 1404 5997"},{"journal-title":"Learning multiple layers of features from tiny images","year":"2009","author":"krizhevsky","key":"ref41"},{"key":"ref23","article-title":"MXNet: A flexible and efficient machine learning library for heterogeneous distributed systems","author":"chen","year":"2015","journal-title":"arXiv 1512 
01274"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.1145\/2847263.2847284"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.1145\/2847263.2847265"},{"key":"ref43","first-page":"3","article-title":"Rectifier nonlinearities improve neural network acoustic models","volume":"30","author":"maas","year":"2013","journal-title":"Proc ICML"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2018.2834146"}],"container-title":["IEEE Access"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/6287639\/8948470\/09272337.pdf?arnumber=9272337","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2021,12,17]],"date-time":"2021-12-17T19:55:13Z","timestamp":1639770913000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9272337\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2020]]},"references-count":48,"URL":"https:\/\/doi.org\/10.1109\/access.2020.3040849","relation":{},"ISSN":["2169-3536"],"issn-type":[{"type":"electronic","value":"2169-3536"}],"subject":[],"published":{"date-parts":[[2020]]}}}