{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,14]],"date-time":"2026-04-14T12:00:11Z","timestamp":1776168011829,"version":"3.50.1"},"reference-count":39,"publisher":"Elsevier BV","license":[{"start":{"date-parts":[[2026,5,1]],"date-time":"2026-05-01T00:00:00Z","timestamp":1777593600000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/tdm\/userlicense\/1.0\/"},{"start":{"date-parts":[[2026,5,1]],"date-time":"2026-05-01T00:00:00Z","timestamp":1777593600000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/legal\/tdmrep-license"},{"start":{"date-parts":[[2026,2,11]],"date-time":"2026-02-11T00:00:00Z","timestamp":1770768000000},"content-version":"vor","delay-in-days":0,"URL":"http:\/\/creativecommons.org\/licenses\/by-nc-nd\/4.0\/"}],"funder":[{"DOI":"10.13039\/501100007835","name":"Silesian University of Technology","doi-asserted-by":"publisher","award":["POIR.01.01.01-00-1629\/20"],"award-info":[{"award-number":["POIR.01.01.01-00-1629\/20"]}],"id":[{"id":"10.13039\/501100007835","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":["elsevier.com","sciencedirect.com"],"crossmark-restriction":true},"short-container-title":["Entertainment Computing"],"published-print":{"date-parts":[[2026,5]]},"DOI":"10.1016\/j.entcom.2026.101099","type":"journal-article","created":{"date-parts":[[2026,2,11]],"date-time":"2026-02-11T16:53:36Z","timestamp":1770828816000},"page":"101099","update-policy":"https:\/\/doi.org\/10.1016\/elsevier_cm_policy","source":"Crossref","is-referenced-by-count":0,"special_numbering":"C","title":["Biomedical signals acquisition methodology for emotion classification in virtual reality game environment"],"prefix":"10.1016","volume":"57","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-4354-8258","authenticated-orcid":false,"given":"Agnieszka","family":"Szcz\u0119sna","sequence":"first","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0001-9659-7451","authenticated-orcid":false,"given":"Micha\u0142","family":"Staniszewski","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0002-9776-9654","authenticated-orcid":false,"given":"Micha\u0142","family":"Cogiel","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0001-5491-9096","authenticated-orcid":false,"given":"Pawe\u0142","family":"Foszner","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0009-0009-4450-0174","authenticated-orcid":false,"given":"Kamil","family":"Antos","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0009-0009-9244-9609","authenticated-orcid":false,"given":"Aleksander","family":"Kempski","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0002-1723-4001","authenticated-orcid":false,"given":"Monika","family":"B\u0142aszczyszyn","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0001-5731-2345","authenticated-orcid":false,"given":"Wojciech","family":"Borczyk","sequence":"additional","affiliation":[]}],"member":"78","reference":[{"issue":"3","key":"10.1016\/j.entcom.2026.101099_b1","doi-asserted-by":"crossref","first-page":"792","DOI":"10.3390\/s18030792","article-title":"Quality control procedure based on partitioning of NMR time series","volume":"18","author":"Staniszewski","year":"2018","journal-title":"Sensors"},{"issue":"1","key":"10.1016\/j.entcom.2026.101099_b2","doi-asserted-by":"crossref","first-page":"79","DOI":"10.1038\/s41597-023-01978-7","article-title":"Datasets for learning of unknown characteristics of dynamical systems","volume":"10","author":"Szcz\u0119sna","year":"2023","journal-title":"Sci. Data"},{"issue":"9","key":"10.1016\/j.entcom.2026.101099_b3","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1145\/3649448","article-title":"Deep learning for time series classification and extrinsic regression: A current survey","volume":"56","author":"Mohammadi Foumani","year":"2024","journal-title":"ACM Comput. Surv."},{"issue":"1","key":"10.1016\/j.entcom.2026.101099_b4","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1145\/3531326","article-title":"Time series prediction using deep learning methods in healthcare","volume":"14","author":"Morid","year":"2023","journal-title":"ACM Trans. Manag. Inf. Syst."},{"issue":"4","key":"10.1016\/j.entcom.2026.101099_b5","doi-asserted-by":"crossref","first-page":"212","DOI":"10.3390\/info11040212","article-title":"A systematic exploration of deep neural networks for EDA-based emotion recognition","volume":"11","author":"Yu","year":"2020","journal-title":"Information"},{"key":"10.1016\/j.entcom.2026.101099_b6","series-title":"Affective Computing and Intelligent Interaction","first-page":"497","article-title":"Emotion in games","author":"Yannakakis","year":"2011"},{"key":"10.1016\/j.entcom.2026.101099_b7","doi-asserted-by":"crossref","DOI":"10.1016\/j.ijhcs.2023.103037","article-title":"A process perspective of immersive virtual reality user experiences: Transition dynamics and mechanisms during gameplay","volume":"176","author":"Wan","year":"2023","journal-title":"Int. J. Hum.-Comput. Stud."},{"key":"10.1016\/j.entcom.2026.101099_b8","doi-asserted-by":"crossref","DOI":"10.1016\/j.ijhcs.2022.102791","article-title":"A design methodology for affective virtual reality","volume":"162","author":"Dozio","year":"2022","journal-title":"Int. J. Hum.-Comput. Stud."},{"key":"10.1016\/j.entcom.2026.101099_b9","doi-asserted-by":"crossref","DOI":"10.1016\/j.ijhcs.2023.103118","article-title":"Effects of immersive media on emotion and memory: An experiment comparing article, 360-video, and virtual reality","volume":"179","author":"Buji\u0107","year":"2023","journal-title":"Int. J. Hum.-Comput. Stud."},{"key":"10.1016\/j.entcom.2026.101099_b10","series-title":"Real-time automatic emotion recognition from body gestures","author":"Piana","year":"2014"},{"key":"10.1016\/j.entcom.2026.101099_b11","doi-asserted-by":"crossref","first-page":"2116","DOI":"10.3389\/fpsyg.2017.02116","article-title":"A public database of immersive VR videos with corresponding ratings of arousal, valence, and correlations between head movements and self report measures","volume":"8","author":"Li","year":"2017","journal-title":"Front. Psychol."},{"key":"10.1016\/j.entcom.2026.101099_b12","first-page":"70","article-title":"The international affective picture system (IAPS) in the study of emotion and attention","volume":"29","author":"Lang","year":"2007","journal-title":"Handb. Emot. Elicitation Assess."},{"key":"10.1016\/j.entcom.2026.101099_b13","doi-asserted-by":"crossref","first-page":"596","DOI":"10.3758\/s13428-013-0379-1","article-title":"The Nencki affective picture system (NAPS): Introduction to a novel, standardized, wide-range, high-quality, realistic picture database","volume":"46","author":"Marchewka","year":"2014","journal-title":"Behav. Res. Methods"},{"key":"10.1016\/j.entcom.2026.101099_b14","doi-asserted-by":"crossref","first-page":"1415","DOI":"10.3758\/s13428-018-1027-6","article-title":"Affective auditory stimulus database: An expanded version of the international affective digitized sounds (IADS-E)","volume":"50","author":"Yang","year":"2018","journal-title":"Behav. Res. Methods"},{"issue":"2","key":"10.1016\/j.entcom.2026.101099_b15","doi-asserted-by":"crossref","first-page":"531","DOI":"10.3758\/BRM.40.2.531","article-title":"The montreal affective voices: A validated set of nonverbal affect bursts for research on auditory affective processing","volume":"40","author":"Belin","year":"2008","journal-title":"Behav. Res. Methods"},{"issue":"1","key":"10.1016\/j.entcom.2026.101099_b16","doi-asserted-by":"crossref","first-page":"42","DOI":"10.1109\/T-AFFC.2011.25","article-title":"A multimodal database for affect recognition and implicit tagging","volume":"3","author":"Soleymani","year":"2011","journal-title":"IEEE Trans. Affect. Comput."},{"key":"10.1016\/j.entcom.2026.101099_b17","series-title":"2018 International Joint Conference on Neural Networks","first-page":"1","article-title":"Emotion recognition from multi-channel EEG through parallel convolutional recurrent neural network","author":"Yang","year":"2018"},{"key":"10.1016\/j.entcom.2026.101099_b18","doi-asserted-by":"crossref","first-page":"313","DOI":"10.1007\/s11257-010-9078-0","article-title":"Towards affective camera control in games","volume":"20","author":"Yannakakis","year":"2010","journal-title":"User Model. User-Adapt. Interact."},{"issue":"4","key":"10.1016\/j.entcom.2026.101099_b19","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1145\/3495002","article-title":"VREED: Virtual reality emotion recognition dataset using eye tracking & physiological measures","volume":"5","author":"Tabbaa","year":"2021","journal-title":"Proc. ACM Interact. Mob. Wearable Ubiquitous Technol."},{"key":"10.1016\/j.entcom.2026.101099_b20","doi-asserted-by":"crossref","unstructured":"P. Schmidt, A. Reiss, R. Duerichen, C. Marberger, K. Van Laerhoven, Introducing wesad, a multimodal dataset for wearable stress and affect detection, in: Proceedings of the 20th ACM International Conference on Multimodal Interaction, 2018, pp. 400\u2013408.","DOI":"10.1145\/3242969.3242985"},{"issue":"1","key":"10.1016\/j.entcom.2026.101099_b21","doi-asserted-by":"crossref","first-page":"18","DOI":"10.1109\/T-AFFC.2011.15","article-title":"Deap: A database for emotion analysis; using physiological signals","volume":"3","author":"Koelstra","year":"2011","journal-title":"IEEE Trans. Affect. Comput."},{"key":"10.1016\/j.entcom.2026.101099_b22","doi-asserted-by":"crossref","first-page":"140990","DOI":"10.1109\/ACCESS.2019.2944001","article-title":"A review, current challenges, and future possibilities on emotion recognition using machine learning and physiological signals","volume":"7","author":"Bota","year":"2019","journal-title":"IEEE Access"},{"issue":"1","key":"10.1016\/j.entcom.2026.101099_b23","doi-asserted-by":"crossref","first-page":"124","DOI":"10.3390\/e25010124","article-title":"Cross-corpus speech emotion recognition based on multi-task learning and subdomain adaptation","volume":"25","author":"Fu","year":"2023","journal-title":"Entropy"},{"key":"10.1016\/j.entcom.2026.101099_b24","series-title":"Neuroergonomics","first-page":"191","article-title":"How to recognize emotions without signal processing: An application of convolutional neural network to physiological signals","author":"Martin","year":"2019"},{"key":"10.1016\/j.entcom.2026.101099_b25","series-title":"2022 10th International Conference on Affective Computing and Intelligent Interaction","first-page":"1","article-title":"Emotion recognition with pre-trained transformers using multimodal signals","author":"Vazquez-Rodriguez","year":"2022"},{"key":"10.1016\/j.entcom.2026.101099_b26","series-title":"2019 IEEE International Conference on Bioinformatics and Biomedicine","first-page":"898","article-title":"Fusing transformer model with temporal features for ECG heartbeat classification","author":"Yan","year":"2019"},{"key":"10.1016\/j.entcom.2026.101099_b27","series-title":"2022 44th Annual International Conference of the IEEE Engineering in Medicine & Biology Society","first-page":"3563","article-title":"Multimodal neurophysiological transformer for emotion recognition","author":"Koorathota","year":"2022"},{"key":"10.1016\/j.entcom.2026.101099_b28","series-title":"Husformer: A multi-modal transformer for multi-modal human state recognition","author":"Wang","year":"2022"},{"key":"10.1016\/j.entcom.2026.101099_b29","doi-asserted-by":"crossref","first-page":"87","DOI":"10.3389\/fnins.2020.00087","article-title":"Latent factor decoding of multi-channel EEG for emotion recognition through autoencoder-like neural networks","volume":"14","author":"Li","year":"2020","journal-title":"Front. Neurosci."},{"key":"10.1016\/j.entcom.2026.101099_b30","doi-asserted-by":"crossref","first-page":"37","DOI":"10.3389\/fnbot.2019.00037","article-title":"SAE+ LSTM: A new framework for emotion recognition from multi-channel EEG","volume":"13","author":"Xing","year":"2019","journal-title":"Front. Neurorobotics"},{"issue":"21","key":"10.1016\/j.entcom.2026.101099_b31","doi-asserted-by":"crossref","first-page":"8467","DOI":"10.3390\/s22218467","article-title":"M1M2: Deep-learning-based real-time emotion recognition from neural activity","volume":"22","author":"Akter","year":"2022","journal-title":"Sensors"},{"issue":"4","key":"10.1016\/j.entcom.2026.101099_b32","doi-asserted-by":"crossref","first-page":"917","DOI":"10.1007\/s10618-019-00619-1","article-title":"Deep learning for time series classification: a review","volume":"33","author":"Ismail Fawaz","year":"2019","journal-title":"Data Min. Knowl. Discov."},{"issue":"6","key":"10.1016\/j.entcom.2026.101099_b33","doi-asserted-by":"crossref","first-page":"1936","DOI":"10.1007\/s10618-020-00710-y","article-title":"Inceptiontime: Finding alexnet for time series classification","volume":"34","author":"Ismail Fawaz","year":"2020","journal-title":"Data Min. Knowl. Discov."},{"issue":"6","key":"10.1016\/j.entcom.2026.101099_b34","doi-asserted-by":"crossref","first-page":"1161","DOI":"10.1037\/h0077714","article-title":"A circumplex model of affect","volume":"39","author":"Russell","year":"1980","journal-title":"J. Pers. Soc. Psychol."},{"issue":"2","key":"10.1016\/j.entcom.2026.101099_b35","doi-asserted-by":"crossref","first-page":"143","DOI":"10.1016\/j.ijpsycho.2003.08.002","article-title":"Autonomic specificity of discrete emotion and dimensions of affective space: A multivariate approach","volume":"51","author":"Christie","year":"2004","journal-title":"Int. J. Psychophysiol."},{"issue":"1","key":"10.1016\/j.entcom.2026.101099_b36","doi-asserted-by":"crossref","first-page":"16","DOI":"10.3390\/bdcc6010016","article-title":"A dataset for emotion recognition using virtual reality and EEG (DER-VREEG): emotional state classification using low-cost wearable VR-EEG headsets","volume":"6","author":"Suhaimi","year":"2022","journal-title":"Big Data Cogn. Comput."},{"issue":"39\u201358","key":"10.1016\/j.entcom.2026.101099_b37","first-page":"3","article-title":"International affective picture system (IAPS): Technical manual and affective ratings","volume":"1","author":"Lang","year":"1997","journal-title":"NIMH Cent. Study Emot. Atten."},{"key":"10.1016\/j.entcom.2026.101099_b38","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1155\/2016\/5182768","article-title":"How color properties can be used to elicit emotions in video games","volume":"2016","author":"Geslin","year":"2016","journal-title":"Int. J. Comput. Games Technol."},{"key":"10.1016\/j.entcom.2026.101099_b39","series-title":"11th International Conference on Intelligent Games and Simulation GAME-on","first-page":"61","article-title":"Colors and emotions in video games","author":"Joosten","year":"2010"}],"container-title":["Entertainment Computing"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S1875952126000212?httpAccept=text\/xml","content-type":"text\/xml","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S1875952126000212?httpAccept=text\/plain","content-type":"text\/plain","content-version":"vor","intended-application":"text-mining"}],"deposited":{"date-parts":[[2026,4,14]],"date-time":"2026-04-14T11:14:29Z","timestamp":1776165269000},"score":1,"resource":{"primary":{"URL":"https:\/\/linkinghub.elsevier.com\/retrieve\/pii\/S1875952126000212"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2026,5]]},"references-count":39,"alternative-id":["S1875952126000212"],"URL":"https:\/\/doi.org\/10.1016\/j.entcom.2026.101099","relation":{},"ISSN":["1875-9521"],"issn-type":[{"value":"1875-9521","type":"print"}],"subject":[],"published":{"date-parts":[[2026,5]]},"assertion":[{"value":"Elsevier","name":"publisher","label":"This article is maintained by"},{"value":"Biomedical signals acquisition methodology for emotion classification in virtual reality game environment","name":"articletitle","label":"Article Title"},{"value":"Entertainment Computing","name":"journaltitle","label":"Journal Title"},{"value":"https:\/\/doi.org\/10.1016\/j.entcom.2026.101099","name":"articlelink","label":"CrossRef DOI link to publisher maintained version"},{"value":"article","name":"content_type","label":"Content Type"},{"value":"\u00a9 2026 The Authors. Published by Elsevier B.V.","name":"copyright","label":"Copyright"}],"article-number":"101099"}}