{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,1,18]],"date-time":"2025-01-18T05:08:40Z","timestamp":1737176920674,"version":"3.33.0"},"reference-count":17,"publisher":"IEEE","license":[{"start":{"date-parts":[[2024,12,15]],"date-time":"2024-12-15T00:00:00Z","timestamp":1734220800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,12,15]],"date-time":"2024-12-15T00:00:00Z","timestamp":1734220800000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024,12,15]]},"DOI":"10.1109\/bigdata62323.2024.10825076","type":"proceedings-article","created":{"date-parts":[[2025,1,16]],"date-time":"2025-01-16T18:31:23Z","timestamp":1737052283000},"page":"8868-8870","source":"Crossref","is-referenced-by-count":0,"title":["Improving Neural Networks Dropout Using An Enhanced Weights Scaling"],"prefix":"10.1109","author":[{"given":"Aviv","family":"Yehezkel","sequence":"first","affiliation":[{"name":"Tel-Aviv,Israel"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1145\/3510413"},{"article-title":"Improving neural networks by preventing co-adaptation of feature detectors","year":"2012","author":"Hinton","key":"ref2"},{"key":"ref3","article-title":"Dropout: A Simple Way to Prevent Neural Networks from Overfitting","author":"Srivastava","year":"2014","journal-title":"Journal of Machine Learning Research"},{"article-title":"Survey of Dropout Methods for Deep Neural Networks","year":"2019","author":"Labach","key":"ref4"},{"article-title":"DropBlock: A regularization method for convolutional networks","volume-title":"NIPS","author":"Ghiasi","key":"ref5"},{"article-title":"Zoneout: Regularizing RNNs by Randomly Preserving Hidden Activations","year":"2016","author":"Krueger","key":"ref6"},{"article-title":"Understanding Dropout","volume-title":"NIPS","author":"Baldi","key":"ref7"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/TKDE.2022.3187013"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1016\/j.neunet.2015.07.007"},{"article-title":"Recurrent neural network regularization","year":"2014","author":"Zaremba","key":"ref10"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/ASRU.2015.7404775"},{"key":"ref12","article-title":"Recurrent dropout without memory loss","author":"Semeniuta","year":"2016","journal-title":"COLING"},{"article-title":"Learning Multiple Layers of Features from Tiny Images","year":"2009","author":"Krizhevsky","key":"ref13"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.5555\/2002472.2002491"},{"article-title":"Dropout with expectation-linear regularization","year":"2016","author":"Ma","key":"ref16"},{"article-title":"Fraternal dropout","year":"2018","author":"Zolna","key":"ref17"},{"article-title":"R-Drop: Regularized Dropout for Neural Networks","volume-title":"NIPS","author":"Liang","key":"ref18"}],"event":{"name":"2024 IEEE International Conference on Big Data (BigData)","start":{"date-parts":[[2024,12,15]]},"location":"Washington, DC, USA","end":{"date-parts":[[2024,12,18]]}},"container-title":["2024 IEEE International Conference on Big Data (BigData)"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/10824975\/10824942\/10825076.pdf?arnumber=10825076","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,1,17]],"date-time":"2025-01-17T08:24:43Z","timestamp":1737102283000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10825076\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,12,15]]},"references-count":17,"URL":"https:\/\/doi.org\/10.1109\/bigdata62323.2024.10825076","relation":{},"subject":[],"published":{"date-parts":[[2024,12,15]]}}}