{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,4,3]],"date-time":"2026-04-03T21:54:53Z","timestamp":1775253293318,"version":"3.50.1"},"reference-count":80,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","license":[{"start":{"date-parts":[[2025,1,1]],"date-time":"2025-01-01T00:00:00Z","timestamp":1735689600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/legalcode"}],"funder":[{"name":"Khalifa University, Abu Dhabi, United Arab Emirates,","award":["CIRA-2020-031"],"award-info":[{"award-number":["CIRA-2020-031"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Access"],"published-print":{"date-parts":[[2025]]},"DOI":"10.1109\/access.2025.3529125","type":"journal-article","created":{"date-parts":[[2025,1,13]],"date-time":"2025-01-13T20:04:15Z","timestamp":1736798655000},"page":"16752-16769","source":"Crossref","is-referenced-by-count":8,"title":["Novel Speech-Based Emotion Climate Recognition in Peers\u2019 Conversations Incorporating Affect Dynamics and Temporal Convolutional Neural Networks"],"prefix":"10.1109","volume":"13","author":[{"ORCID":"https:\/\/orcid.org\/0000-0001-6181-8306","authenticated-orcid":false,"given":"Ghada","family":"Alhussein","sequence":"first","affiliation":[{"name":"Department of Biomedical Engineering and Biotechnology, Khalifa University of Science and Technology, Abu Dhabi, United Arab Emirates"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-5248-6327","authenticated-orcid":false,"given":"Mohanad","family":"Alkhodari","sequence":"additional","affiliation":[{"name":"Radcliffe Department of Medicine, Cardiovascular Clinical Research Facility, University of Oxford, Oxford, U.K."}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-0636-1646","authenticated-orcid":false,"given":"Ahsan H.","family":"Khandoker","sequence":"additional","affiliation":[{"name":"Department of 
Biomedical Engineering and Biotechnology, Khalifa University of Science and Technology, Abu Dhabi, United Arab Emirates"}]},{"ORCID":"https:\/\/orcid.org\/0000-0002-9932-9302","authenticated-orcid":false,"given":"Leontios J.","family":"Hadjileontiadis","sequence":"additional","affiliation":[{"name":"Department of Biomedical Engineering and Biotechnology, Khalifa University of Science and Technology, Abu Dhabi, United Arab Emirates"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1016\/B978-0-08-097086-8.25006-X"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.4324\/9781315086071"},{"key":"ref3","article-title":"Mental health","author":"Dattani","year":"2023","journal-title":"Our World Data"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1001\/jamapsychiatry.2014.2502"},{"key":"ref5","doi-asserted-by":"publisher","DOI":"10.1007\/s10772-017-9457-6"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.21437\/Interspeech.2018-1242"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.3389\/fpsyg.2018.00613"},{"issue":"1","key":"ref8","first-page":"1","article-title":"Heart sound signals can be used for emotion recognition","volume-title":"Sci. Rep.","volume":"9","author":"Xiefeng","year":"2019"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1007\/s10865-009-9225-4"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1016\/j.pec.2004.12.003"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1007\/s00521-021-06012-8"},{"key":"ref12","first-page":"1","article-title":"BERS: Bussiness-related emotion recognition system in Urdu language using machine learning","volume-title":"Proc. 5th Int. Conf. 
Behav., Econ., Socio-Cultural Comput.","author":"Hameed"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.14257\/ijsh.2016.10.8.14"},{"key":"ref14","doi-asserted-by":"publisher","DOI":"10.1126\/science.164.3875.86"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1016\/0010-0277(92)90002-Y"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.3389\/fnhum.2013.00551"},{"issue":"11","key":"ref17","doi-asserted-by":"crossref","first-page":"4782","DOI":"10.3390\/app11114782","article-title":"Make patient consultation warmer: A clinical application for speech emotion recognition","volume":"11","author":"Li","year":"2021","journal-title":"Appl. Sci."},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/ICMI.2002.1166983"},{"key":"ref19","first-page":"396","article-title":"Relational agents: A model and implementation of building user trust","volume-title":"Proc. SIGCHI Conf. Human Factors Comput. Syst.","author":"Bickmore"},{"key":"ref20","first-page":"873","article-title":"Context-dependent sentiment analysis in user-generated videos","volume-title":"Proc. 55th Annu. Meeting Assoc. Comput. Linguistics","volume":"1","author":"Poria"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.sigdial-1.23"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-319-72038-8_13"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1007\/s10579-008-9076-6"},{"key":"ref24","article-title":"DailyDialog: A manually labelled multi-turn dialogue dataset","author":"Li","year":"2017","journal-title":"arXiv:1710.03957"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1050"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/P19-1176"},{"key":"ref27","doi-asserted-by":"publisher","DOI":"10.3390\/s21144913"},{"key":"ref28","first-page":"2594","article-title":"ICON: Interactive conversational memory network for multimodal emotion detection","volume-title":"Proc. Conf. 
Empirical Methods Natural Lang. Process.","author":"Hazarika"},{"key":"ref29","first-page":"2122","article-title":"Conversational memory network for emotion recognition in dyadic dialogue videos","volume-title":"Proc. NAACL HLT","author":"Hazarika"},{"key":"ref30","first-page":"6818","article-title":"DialogueRNN: An attentive RNN for emotion detection in conversations","volume-title":"Proc. AAAI Conf. Artif. Intell.","volume":"33","author":"Majumder"},{"key":"ref31","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2019\/755"},{"key":"ref32","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/D19-1015"},{"key":"ref33","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2021.acl-long.440"},{"issue":"1","key":"ref34","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1038\/s41597-020-00630-y","article-title":"K-EmoCon, a multimodal sensor dataset for continuous emotion recognition in naturalistic conversations","volume":"7","author":"Park","year":"2020","journal-title":"Scientific Data"},{"key":"ref35","first-page":"1","article-title":"Sewa: A multimodal database of spontaneous affective interactions","volume-title":"Proc. IEEE Int. Conf. Affective Comput. Intell. Interact. (ACII)","author":"Kne\u017eevi\u0107"},{"issue":"2","key":"ref36","doi-asserted-by":"crossref","first-page":"101","DOI":"10.17505\/jpor.2019.09","article-title":"Affect dynamics as predictors of symptom severity and treatment response in mood and anxiety disorders: Evidence for specificity","volume":"5","author":"Bosley","year":"2019","journal-title":"J. 
Person-Oriented Res."},{"key":"ref37","doi-asserted-by":"publisher","DOI":"10.1038\/s41562-019-0555-0"},{"key":"ref38","doi-asserted-by":"publisher","DOI":"10.1007\/BF02943243"},{"key":"ref39","volume-title":"Theory and Applications of Digital Speech Processing","author":"Rabiner","year":"2010"},{"key":"ref40","doi-asserted-by":"publisher","DOI":"10.1371\/journal.pone.0262448"},{"key":"ref41","doi-asserted-by":"publisher","DOI":"10.1109\/5.237532"},{"key":"ref42","doi-asserted-by":"publisher","DOI":"10.1038\/s41598-020-77264-y"},{"key":"ref43","article-title":"An empirical evaluation of generic convolutional and recurrent networks for sequence modeling","author":"Bai","year":"2018","journal-title":"arXiv:1803.01271"},{"key":"ref44","doi-asserted-by":"publisher","DOI":"10.3390\/electronics8080876"},{"issue":"1","key":"ref45","first-page":"1","article-title":"Temporal convolutional networks for the advance prediction of ENSO","volume-title":"Sci. Rep.","volume":"10","author":"Yan","year":"2020"},{"key":"ref46","doi-asserted-by":"publisher","DOI":"10.1007\/s41870-019-00409-4"},{"key":"ref47","first-page":"1","article-title":"SVM kernel functions for classification","volume-title":"Proc. Int. Conf. Adv. Technol. Eng. (ICATE)","author":"Patle"},{"key":"ref48","doi-asserted-by":"publisher","DOI":"10.1109\/TIT.1967.1053964"},{"key":"ref49","doi-asserted-by":"publisher","DOI":"10.1016\/j.healun.2017.07.005"},{"key":"ref50","first-page":"265","article-title":"On optimization methods for deep learning","volume-title":"Proc. 
ICML","author":"Ngiam"},{"key":"ref51","doi-asserted-by":"publisher","DOI":"10.1109\/ICDH55609.2022.00023"},{"key":"ref52","doi-asserted-by":"publisher","DOI":"10.1016\/j.neunet.2017.02.013"},{"issue":"9","key":"ref53","article-title":"Speech emotion recognition using multichannel feature fusion and recurrent neural network","volume":"14","author":"Lee","year":"2019","journal-title":"PLoS ONE"},{"key":"ref54","doi-asserted-by":"publisher","DOI":"10.3390\/s21165317"},{"key":"ref55","doi-asserted-by":"publisher","DOI":"10.1109\/ICASSP.2018.8462162"},{"key":"ref56","doi-asserted-by":"publisher","DOI":"10.1109\/TAFFC.2020.2983669"},{"key":"ref57","doi-asserted-by":"publisher","DOI":"10.3390\/sym14071428"},{"key":"ref58","doi-asserted-by":"publisher","DOI":"10.1145\/3266302.3266306"},{"key":"ref59","doi-asserted-by":"publisher","DOI":"10.21437\/Interspeech.2008-192"},{"key":"ref60","doi-asserted-by":"publisher","DOI":"10.1007\/978-3-031-28238-6_11"},{"issue":"1","key":"ref61","first-page":"21","article-title":"Prosody and emotional expression in different languages","volume":"26","author":"Vaissiere","year":"1983","journal-title":"Lang. Speech"},{"key":"ref62","volume-title":"Lexical Semantics and Emotion","author":"Wierzbicka","year":"1985"},{"issue":"2","key":"ref63","first-page":"99","article-title":"Cultural differences in emotional expression and perception","volume":"46","author":"Sneddon","year":"2011","journal-title":"Int. J. 
Psychol."},{"key":"ref64","doi-asserted-by":"publisher","DOI":"10.1016\/j.ipm.2022.103234"},{"key":"ref65","doi-asserted-by":"publisher","DOI":"10.1109\/FIT.2018.00023"},{"key":"ref66","doi-asserted-by":"publisher","DOI":"10.1037\/1528-3542.5.2.175"},{"key":"ref67","doi-asserted-by":"publisher","DOI":"10.5406\/amerjpsyc.130.3.0367"},{"key":"ref68","doi-asserted-by":"publisher","DOI":"10.24963\/ijcai.2023\/689"},{"key":"ref69","doi-asserted-by":"publisher","DOI":"10.2196\/32557"},{"key":"ref70","doi-asserted-by":"publisher","DOI":"10.1007\/BF02344719"},{"key":"ref71","doi-asserted-by":"publisher","DOI":"10.1201\/b23083"},{"key":"ref72","doi-asserted-by":"publisher","DOI":"10.1016\/j.jbusres.2020.11.030"},{"key":"ref73","doi-asserted-by":"publisher","DOI":"10.1016\/j.chb.2018.07.026"},{"key":"ref74","doi-asserted-by":"publisher","DOI":"10.1080\/1461670X.2021.1916984"},{"key":"ref75","doi-asserted-by":"publisher","DOI":"10.1109\/JBHI.2022.3225330"},{"key":"ref76","doi-asserted-by":"publisher","DOI":"10.1109\/ACCESS.2022.3165622"},{"key":"ref77","doi-asserted-by":"publisher","DOI":"10.1109\/TAFFC.2015.2457417"},{"key":"ref78","doi-asserted-by":"publisher","DOI":"10.1007\/s11042-012-1157-2"},{"key":"ref79","doi-asserted-by":"publisher","DOI":"10.1140\/epjds\/s13688-019-0219-3"},{"key":"ref80","doi-asserted-by":"publisher","DOI":"10.1109\/JBHI.2022.3223127"}],"container-title":["IEEE 
Access"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx8\/6287639\/10820123\/10839385.pdf?arnumber=10839385","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,1,30]],"date-time":"2025-01-30T19:24:16Z","timestamp":1738265056000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10839385\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025]]},"references-count":80,"URL":"https:\/\/doi.org\/10.1109\/access.2025.3529125","relation":{},"ISSN":["2169-3536"],"issn-type":[{"value":"2169-3536","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025]]}}}