{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2024,10,30]],"date-time":"2024-10-30T03:38:43Z","timestamp":1730259523304,"version":"3.28.0"},"reference-count":11,"publisher":"IEEE","license":[{"start":{"date-parts":[[2020,10,21]],"date-time":"2020-10-21T00:00:00Z","timestamp":1603238400000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2020,10,21]],"date-time":"2020-10-21T00:00:00Z","timestamp":1603238400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2020,10,21]],"date-time":"2020-10-21T00:00:00Z","timestamp":1603238400000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2020,10,21]]},"DOI":"10.1109\/ictc49870.2020.9289463","type":"proceedings-article","created":{"date-parts":[[2020,12,21]],"date-time":"2020-12-21T17:58:16Z","timestamp":1608573496000},"page":"1276-1278","source":"Crossref","is-referenced-by-count":3,"title":["Knowledge Distillation based Compact Model Learning Method for Object Detection"],"prefix":"10.1109","author":[{"given":"Jong Gook","family":"Ko","sequence":"first","affiliation":[]},{"given":"Wonyoung","family":"Yoo","sequence":"additional","affiliation":[]}],"member":"263","reference":[{"article-title":"Unifying Distillation and Privileged Information","year":"2015","author":"lopez-paz","key":"ref4"},{"key":"ref3","first-page":"2","article-title":"Learning Using Privileged Information: Similarity Control and Knowledge Transfer","volume":"16","author":"vapnik","year":"2015","journal-title":"Journal of Machine Learning Research"},{"article-title":"Large scale distributed neural network training through online distillation","year":"2018","author":"anil","key":"ref10"},{"article-title":"Fitnets: Hints for thin deep nets","year":"2014","author":"romero","key":"ref6"},{"article-title":"Born again neural networks","year":"2018","author":"furlanello","key":"ref11"},{"key":"ref5","first-page":"2654","article-title":"Do deep nets really need to be deep?","author":"ba","year":"2014","journal-title":"Advances in neural information processing systems"},{"key":"ref8","first-page":"5142","article-title":"Towards understanding knowledge distillation","author":"phuong","year":"2019","journal-title":"International Conference on Machine Learning"},{"article-title":"Born again neural networks","year":"2018","author":"furlanello","key":"ref7"},{"key":"ref2","first-page":"2654","article-title":"Do deep nets really need to be deep?","author":"ba","year":"2014","journal-title":"Advances in neural information processing systems"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1145\/1150402.1150464"},{"article-title":"Distilling the knowledge in a neural network","year":"2015","author":"hinton","key":"ref1"}],"event":{"name":"2020 International Conference on Information and Communication Technology Convergence (ICTC)","start":{"date-parts":[[2020,10,21]]},"location":"Jeju, Korea (South)","end":{"date-parts":[[2020,10,23]]}},"container-title":["2020 International Conference on Information and Communication Technology Convergence 
(ICTC)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/9289075\/9289076\/09289463.pdf?arnumber=9289463","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,6,27]],"date-time":"2022-06-27T20:19:21Z","timestamp":1656361161000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/9289463\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2020,10,21]]},"references-count":11,"URL":"https:\/\/doi.org\/10.1109\/ictc49870.2020.9289463","relation":{},"subject":[],"published":{"date-parts":[[2020,10,21]]}}}