{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,17]],"date-time":"2026-03-17T11:11:56Z","timestamp":1773745916630,"version":"3.50.1"},"reference-count":53,"publisher":"Springer Science and Business Media LLC","issue":"1","license":[{"start":{"date-parts":[[2025,12,21]],"date-time":"2025-12-21T00:00:00Z","timestamp":1766275200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2025,12,21]],"date-time":"2025-12-21T00:00:00Z","timestamp":1766275200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"funder":[{"DOI":"10.13039\/501100014826","name":"ADAPT - Centre for Digital Content Technology","doi-asserted-by":"publisher","award":["13\/RC\/2106_P2"],"award-info":[{"award-number":["13\/RC\/2106_P2"]}],"id":[{"id":"10.13039\/501100014826","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["J Multimodal User Interfaces"],"published-print":{"date-parts":[[2026,3]]},"DOI":"10.1007\/s12193-025-00471-2","type":"journal-article","created":{"date-parts":[[2025,12,21]],"date-time":"2025-12-21T11:27:25Z","timestamp":1766316445000},"page":"69-86","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":0,"title":["Prediction of self-reported and external observations of conversational engagement in online group discussions"],"prefix":"10.1007","volume":"20","author":[{"ORCID":"https:\/\/orcid.org\/0000-0002-8667-2638","authenticated-orcid":false,"given":"Sam","family":"O\u2019Connor 
Russell","sequence":"first","affiliation":[]},{"given":"Justine","family":"Reverdy","sequence":"additional","affiliation":[]},{"given":"Benjamin","family":"Cowan","sequence":"additional","affiliation":[]},{"given":"Naomi","family":"Harte","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2025,12,21]]},"reference":[{"key":"471_CR1","doi-asserted-by":"publisher","DOI":"10.1080\/00461520.2014.1002924","volume-title":"The challenges of defining and measuring student engagement in science","author":"GM Sinatra","year":"2015","unstructured":"Sinatra GM, Heddy BC, Lombardi D (2015) The challenges of defining and measuring student engagement in science. Taylor & Francis"},{"issue":"10","key":"471_CR2","doi-asserted-by":"publisher","first-page":"1398","DOI":"10.1109\/JPROC.2023.3309560","volume":"111","author":"BM Booth","year":"2023","unstructured":"Booth BM, Bosch N, D\u2019Mello SK (2023) Engagement detection and its applications in learning: a tutorial and selective review. Proc IEEE 111(10):1398\u20131422","journal-title":"Proc IEEE"},{"key":"471_CR3","doi-asserted-by":"publisher","first-page":"36","DOI":"10.1016\/j.compedu.2015.09.005","volume":"90","author":"CR Henrie","year":"2015","unstructured":"Henrie CR, Halverson LR, Graham CR (2015) Measuring student engagement in technology-mediated learning: A review. Computers & Education 90:36\u201353","journal-title":"Computers & Education"},{"issue":"5","key":"471_CR4","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1145\/3234149","volume":"51","author":"K Doherty","year":"2018","unstructured":"Doherty K, Doherty G (2018) Engagement in hci: conception, theory and measurement. 
ACM Computing Surveys (CSUR) 51(5):1\u201339","journal-title":"ACM Computing Surveys (CSUR)"},{"issue":"1","key":"471_CR5","doi-asserted-by":"publisher","first-page":"59","DOI":"10.3102\/00346543074001059","volume":"74","author":"JA Fredricks","year":"2004","unstructured":"Fredricks JA, Blumenfeld PC, Paris AH (2004) School engagement: Potential of the concept, state of the evidence. Rev Educ Res 74(1):59\u2013109","journal-title":"Rev Educ Res"},{"key":"471_CR6","volume-title":"Student engagement, context, and adjustment: Addressing definitional, measurement, and methodological issues","author":"JA Fredricks","year":"2016","unstructured":"Fredricks JA, Filsecker M, Lawson MA (2016) Student engagement, context, and adjustment: Addressing definitional, measurement, and methodological issues. Elsevier"},{"key":"471_CR7","doi-asserted-by":"publisher","unstructured":"Bosch N, D\u2019Mello S, Baker R, Ocumpaugh J, Shute V, Ventura M, Wang L, Zhao W. Automatic Detection of Learning-Centered Affective States in the Wild. In: Proceedings of the 20th International Conference on Intelligent User Interfaces, pp. 379\u2013388. ACM, Atlanta Georgia USA (2015). https:\/\/doi.org\/10.1145\/2678025.2701397 . Accessed 2022-01-25","DOI":"10.1145\/2678025.2701397"},{"issue":"1","key":"471_CR8","doi-asserted-by":"publisher","first-page":"3","DOI":"10.3233\/IRG-2006-16(1)02","volume":"16","author":"SK D\u2019Mello","year":"2006","unstructured":"D\u2019Mello SK, Craig SD, Sullins J, Graesser AC (2006) Predicting affective states expressed through an emote-aloud procedure from AutoTutor\u2019s mixed-initiative dialogue. 
International Journal of Artificial Intelligence in Education 16(1):3\u201328 (Publisher: IOS Press)","journal-title":"International Journal of Artificial Intelligence in Education"},{"issue":"2","key":"471_CR9","doi-asserted-by":"publisher","first-page":"104","DOI":"10.1080\/00461520.2017.1281747","volume":"52","author":"S D\u2019Mello","year":"2017","unstructured":"D\u2019Mello S, Dieterle E, Duckworth A (2017) Advanced, analytic, automated (aaa) measurement of engagement during learning. Educational psychologist 52(2):104\u2013123","journal-title":"Educational psychologist"},{"key":"471_CR10","unstructured":"Reverdy J, O\u2019Connor\u00a0Russell S, Duquenne L, Garaialde D, Cowan BR, Harte N. RoomReader: A multimodal corpus of online multiparty conversational interactions. In: Calzolari N, B\u00e9chet F, Blache P, Choukri K, Cieri C, Declerck T, Goggi S, Isahara H, Maegaard B, Mariani J, Mazo H, Odijk J, Piperidis S. (eds.) Proceedings of the Thirteenth Language Resources and Evaluation Conference, pp. 2517\u20132527. European Language Resources Association, Marseille, France (2022). https:\/\/aclanthology.org\/2022.lrec-1.268\/"},{"key":"471_CR11","first-page":"4079","volume":"2024","author":"K Suzuki","year":"2024","unstructured":"Suzuki K, Hojo N, Shinoda K, Mizuno S, Masumura R (2024) Participant-pair-wise bottleneck transformer for engagement estimation from video conversation. Proc. Interspeech 2024:4079\u20134083","journal-title":"Proc. Interspeech"},{"issue":"4","key":"471_CR12","doi-asserted-by":"publisher","first-page":"401","DOI":"10.1207\/s15327051hci1004_2","volume":"10","author":"AJ Sellen","year":"1995","unstructured":"Sellen AJ (1995) Remote conversations: The effects of mediating talk with technology. Human-computer interaction 10(4):401\u2013444","journal-title":"Human-computer interaction"},{"key":"471_CR13","doi-asserted-by":"crossref","unstructured":"Isaacs EA, Tang JC (1993) What video can and can\u2019t do for collaboration: a case study. 
In: Proceedings of the First ACM International Conference on Multimedia, pp. 199\u2013206","DOI":"10.1145\/166266.166289"},{"issue":"3","key":"471_CR14","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1145\/3550328","volume":"6","author":"B DiSalvo","year":"2022","unstructured":"DiSalvo B, Bandaru D, Wang Q, Li H, Pl\u00f6tz T (2022) Reading the room: Automated, momentary assessment of student engagement in the classroom: Are we there yet? Proceedings of the ACM on Interactive Mobile Wearable and Ubiquitous Technologies 6(3):1\u201326","journal-title":"Proceedings of the ACM on Interactive Mobile Wearable and Ubiquitous Technologies"},{"key":"471_CR15","doi-asserted-by":"publisher","unstructured":"Gupta A, D\u2019Cunha A, Awasthi K, Balasubramanian V (2016) Daisee: Towards user engagement recognition in the wild. arXiv preprint arXiv:1609.01885https:\/\/doi.org\/10.48550\/arXiv.1609.01885","DOI":"10.48550\/arXiv.1609.01885"},{"key":"471_CR16","doi-asserted-by":"crossref","unstructured":"Yu C, Aoki PM, Woodruff A (2004) Detecting user engagement in everyday conversations. arXiv preprint arXiv:cs\/0410027","DOI":"10.21437\/Interspeech.2004-327"},{"key":"471_CR17","doi-asserted-by":"crossref","unstructured":"Lee DW, Kim Y, Picard RW, Breazeal C, Park HW (2023) Multipar-t: multiparty-transformer for capturing contingent behaviors in group conversations. In: Proceedings of the Thirty-Second International Joint Conference on Artificial Intelligence, pp. 3893\u20133901","DOI":"10.24963\/ijcai.2023\/433"},{"key":"471_CR18","doi-asserted-by":"publisher","DOI":"10.1016\/j.inffus.2024.102224","volume":"105","author":"M Li","year":"2024","unstructured":"Li M, Zhuang X, Bai L, Ding W (2024) Multimodal graph learning based on 3d haar semi-tight framelet for student engagement prediction. 
Information Fusion 105:102224","journal-title":"Information Fusion"},{"issue":"1","key":"471_CR19","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1186\/s40561-018-0080-z","volume":"6","author":"MAA Dewan","year":"2019","unstructured":"Dewan MAA, Murshed M, Lin F (2019) Engagement detection in online learning: a review. Smart Learning Environments 6(1):1. https:\/\/doi.org\/10.1186\/s40561-018-0080-z","journal-title":"Smart Learning Environments"},{"key":"471_CR20","doi-asserted-by":"publisher","unstructured":"Goldberg P, S\u00fcmer \u00d6, St\u00fcrmer K, Wagner W, G\u00f6llner R, Gerjets P, Kasneci E, Trautwein U, (2021) Attentive or Not? Toward a Machine Learning Approach to Assessing Students\u2019 Visible Engagement in Classroom Instruction. Educational Psychology Review 33(1):27\u201349. https:\/\/doi.org\/10.1007\/s10648-019-09514-z","DOI":"10.1007\/s10648-019-09514-z"},{"key":"471_CR21","doi-asserted-by":"publisher","first-page":"153","DOI":"10.1016\/j.learninstruc.2012.05.003","volume":"29","author":"S D\u2019Mello","year":"2014","unstructured":"D\u2019Mello S, Lehman B, Pekrun R, Graesser A (2014) Confusion can be beneficial for learning. Learning and Instruction 29:153\u2013170. https:\/\/doi.org\/10.1016\/j.learninstruc.2012.05.003. (Publisher: Elsevier)","journal-title":"Learning and Instruction"},{"key":"471_CR22","doi-asserted-by":"publisher","first-page":"63","DOI":"10.1016\/j.ijhcs.2017.10.006","volume":"110","author":"K Doherty","year":"2018","unstructured":"Doherty K, Doherty G (2018) The construal of experience in hci: Understanding self-reports. International Journal of Human-Computer Studies 110:63\u201374. 
https:\/\/doi.org\/10.1016\/j.ijhcs.2017.10.006","journal-title":"International Journal of Human-Computer Studies"},{"key":"471_CR23","doi-asserted-by":"publisher","DOI":"10.1109\/TAFFC.2021.3127692","author":"O Sumer","year":"2021","unstructured":"Sumer O, Goldberg P, D\u2019Mello S, Gerjets P, Trautwein U, Kasneci E (2021) Multimodal engagement analysis from facial videos in the classroom. IEEE Trans Affect Comput. https:\/\/doi.org\/10.1109\/TAFFC.2021.3127692","journal-title":"IEEE Trans Affect Comput"},{"key":"471_CR24","unstructured":"D\u2019Mello SK (2021) Improving student engagement in and with digital learning technologies. OECD Digital Education Outlook, Pushing the Frontiers with Artificial Intelligence, Blockchain and Robots: Pushing the Frontiers with Artificial Intelligence, Blockchain and Robots, 79 (2021). OECD Publishing, Publisher"},{"key":"471_CR25","doi-asserted-by":"crossref","unstructured":"Farr F, Riordan E (2024) The affordances of videoconferencing for evidence-based reflective practice and multimodal corpus research in teacher education practicum contexts. In: Routledge Handbook of Technological Advances in Researching Language Learning, pp. 299\u2013310. Routledge","DOI":"10.4324\/9781003459088-27"},{"key":"471_CR26","doi-asserted-by":"crossref","unstructured":"Dhall A, Kaur A, Goecke R, Gedeon T (2018) Emotiw 2018: Audio-video, student engagement and group-level affect prediction. In: Proceedings of the 20th ACM International Conference on Multimodal Interaction, pp. 653\u2013656. ACM","DOI":"10.1145\/3242969.3264993"},{"key":"471_CR27","doi-asserted-by":"publisher","unstructured":"Dhall A (2019) Emotiw 2019: Automatic emotion, engagement and cohesion prediction tasks. In: 2019 International Conference on Multimodal Interaction, pp. 546\u2013550. 
ACM https:\/\/doi.org\/10.1145\/3340555.3355710","DOI":"10.1145\/3340555.3355710"},{"key":"471_CR28","doi-asserted-by":"publisher","unstructured":"Dhall A, Sharma G, Goecke R, Gedeon T (2020) Emotiw 2020: Driver gaze, group emotion, student engagement and physiological signal based challenges. In: Proceedings of the 2020 International Conference on Multimodal Interaction, pp. 784\u2013789. ACM https:\/\/doi.org\/10.1145\/3382507.3417973","DOI":"10.1145\/3382507.3417973"},{"key":"471_CR29","doi-asserted-by":"publisher","DOI":"10.1037\/t27734-000","author":"P Ekman","year":"1978","unstructured":"Ekman P, Friesen WV (1978) Facial action coding system. Environmental Psychology & Nonverbal Behavior. https:\/\/doi.org\/10.1037\/t27734-000","journal-title":"Environmental Psychology & Nonverbal Behavior"},{"key":"471_CR30","doi-asserted-by":"publisher","unstructured":"Thomas C, Jayagopi DB (2017) Predicting student engagement in classrooms using facial behavioral cues. In: Proceedings of the 1st ACM SIGCHI International Workshop on Multimodal Interaction For Education, pp. 33\u201340. ACM, Glasgow UK. https:\/\/doi.org\/10.1145\/3139513.3139514 . Accessed 2022-01-24","DOI":"10.1145\/3139513.3139514"},{"key":"471_CR31","doi-asserted-by":"publisher","unstructured":"Alkabbany I, Ali A, Farag A, Bennett I, Ghanoum M, Farag A (2019) Measuring Student Engagement Level Using Facial Information. In: 2019 IEEE International Conference on Image Processing (ICIP), pp. 3337\u20133341. IEEE, Taipei, Taiwan. https:\/\/doi.org\/10.1109\/ICIP.2019.8803590 . Accessed 2022-01-24","DOI":"10.1109\/ICIP.2019.8803590"},{"key":"471_CR32","doi-asserted-by":"publisher","unstructured":"Grafsgaard JF, Wiggins JB, Boyer KE, Wiebe EN, Lester JC (2013) Automatically Recognizing Facial Indicators of Frustration: A Learning-centric Analysis. In: 2013 Humaine Association Conference on Affective Computing and Intelligent Interaction, pp. 159\u2013165. IEEE, Geneva, Switzerland. 
https:\/\/doi.org\/10.1109\/ACII.2013.33 . http:\/\/ieeexplore.ieee.org\/document\/6681424\/ Accessed 2022-02-23","DOI":"10.1109\/ACII.2013.33"},{"key":"471_CR33","doi-asserted-by":"publisher","unstructured":"De\u00a0Carolis B, D\u2019Errico F, Macchiarulo N, Palestra G (2019) \u201cEngaged Faces\u201d: Measuring and Monitoring Student Engagement from Face and Gaze Behavior. In: IEEE\/WIC\/ACM International Conference on Web Intelligence-Companion Volume, pp. 80\u201385. https:\/\/doi.org\/10.1145\/3358695.3361748","DOI":"10.1145\/3358695.3361748"},{"key":"471_CR34","doi-asserted-by":"crossref","unstructured":"Chen Y, Yu Y, Odobez J-M (2015) Head nod detection from a full 3d model. In: Proceedings of the IEEE International Conference on Computer Vision Workshops, pp. 136\u2013144","DOI":"10.1109\/ICCVW.2015.75"},{"key":"471_CR35","doi-asserted-by":"publisher","unstructured":"Wei H, Scanlon P, Li Y, Monaghan DS, O\u2019Connor NE (2013) Real-time head nod and shake detection for continuous human affect recognition. In: 2013 14th International Workshop on Image Analysis for Multimedia Interactive Services (WIAMIS), pp. 1\u20134. IEEE, https:\/\/doi.org\/10.1109\/WIAMIS.2013.6616148","DOI":"10.1109\/WIAMIS.2013.6616148"},{"key":"471_CR36","doi-asserted-by":"publisher","unstructured":"Baltru\u0161aitis T, Robinson P, Morency L-P (2016) Openface: an open source facial behavior analysis toolkit. In: 2016 IEEE Winter Conference on Applications of Computer Vision (WACV), pp. 1\u201310. IEEE, https:\/\/doi.org\/10.1109\/WACV.2016.7477553","DOI":"10.1109\/WACV.2016.7477553"},{"key":"471_CR37","doi-asserted-by":"publisher","unstructured":"Wall E, Schillingmann L, Kummert F (2017) Online nod detection in human-robot interaction. In: 2017 26th IEEE International Symposium on Robot and Human Interactive Communication (RO-MAN), pp. 811\u2013817. 
IEEE, https:\/\/doi.org\/10.1109\/ROMAN.2017.8172396","DOI":"10.1109\/ROMAN.2017.8172396"},{"key":"471_CR38","doi-asserted-by":"crossref","unstructured":"He K, Zhang X, Ren S, Sun J (2016) Deep residual learning for image recognition. In: Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pp. 770\u2013778","DOI":"10.1109\/CVPR.2016.90"},{"issue":"1","key":"471_CR39","doi-asserted-by":"publisher","first-page":"59","DOI":"10.3102\/00346543074001059","volume":"74","author":"JA Fredricks","year":"2004","unstructured":"Fredricks JA, Blumenfeld PC, Paris AH (2004) School engagement: Potential of the concept, state of the evidence. Review of educational research 74(1):59\u2013109. https:\/\/doi.org\/10.3102\/00346543074001059. (Publisher: Sage Publications Sage CA: Thousand Oaks, CA)","journal-title":"Review of educational research"},{"issue":"5","key":"471_CR40","first-page":"62","volume":"42","author":"CF Herreid","year":"2013","unstructured":"Herreid CF, Schiller NA (2013) Case Studies and the Flipped Classroom. J Coll Sci Teach 42(5):62\u201366","journal-title":"J Coll Sci Teach"},{"key":"471_CR41","doi-asserted-by":"crossref","unstructured":"Conneau A, Baevski A, Collobert R, Mohamed A, Auli M (2021) Unsupervised cross-lingual representation learning for speech recognition","DOI":"10.21437\/Interspeech.2021-329"},{"key":"471_CR42","doi-asserted-by":"crossref","unstructured":"Yang J, Wang K, Peng X, Qiao Y (2018) Deep recurrent multi-instance learning with spatio-temporal features for engagement intensity prediction. In: Proceedings of the 20th ACM International Conference on Multimodal Interaction, pp. 594\u2013598","DOI":"10.1145\/3242969.3264981"},{"key":"471_CR43","doi-asserted-by":"publisher","unstructured":"Girard JM (2014) CARMA: Software for continuous affect rating and media annotation. Journal of open research software 2(1) https:\/\/doi.org\/10.1145\/3242969.3264981 . 
Publisher: NIH Public Access","DOI":"10.1145\/3242969.3264981"},{"key":"471_CR44","doi-asserted-by":"crossref","unstructured":"Cudeck R(2000) Exploratory factor analysis. In: Handbook of Applied Multivariate Statistics and Mathematical Modeling, pp. 265\u2013296. Elsevier","DOI":"10.1016\/B978-012691360-6\/50011-2"},{"key":"471_CR45","doi-asserted-by":"publisher","first-page":"625","DOI":"10.1007\/s10459-010-9222-y","volume":"15","author":"G Norman","year":"2010","unstructured":"Norman G (2010) Likert scales, levels of measurement and the \u201claws\u2019\u2019 of statistics. Adv Health Sci Educ 15:625\u2013632","journal-title":"Adv Health Sci Educ"},{"key":"471_CR46","unstructured":"Abdi H (2003) Factor rotations in factor analyses. Encyclopedia for Research Methods for the Social Sciences. Sage: Thousand Oaks, CA, 792\u2013795"},{"issue":"1","key":"471_CR47","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1007\/BF02291170","volume":"38","author":"LR Tucker","year":"1973","unstructured":"Tucker LR, Lewis C (1973) A reliability coefficient for maximum likelihood factor analysis. Psychometrika 38(1):1\u201310","journal-title":"Psychometrika"},{"key":"471_CR48","doi-asserted-by":"publisher","first-page":"53","DOI":"10.5116\/ijme.4dfb.8dfd","volume":"2","author":"M Tavakol","year":"2011","unstructured":"Tavakol M, Dennick R (2011) Making sense of cronbach\u2019s alpha. Int J Med Educ 2:53","journal-title":"Int J Med Educ"},{"issue":"2","key":"471_CR49","first-page":"39","volume":"13","author":"L-T Chen","year":"2020","unstructured":"Chen L-T, Liu L (2020) Methods to analyze likert-type data in educational technology research. 
Journal of Educational Technology Development and Exchange (JETDE) 13(2):39\u201360","journal-title":"Journal of Educational Technology Development and Exchange (JETDE)"},{"key":"471_CR50","first-page":"2825","volume":"12","author":"F Pedregosa","year":"2011","unstructured":"Pedregosa F, Varoquaux G, Gramfort A, Michel V, Thirion B, Grisel O, Blondel M, Prettenhofer P, Weiss R, Dubourg V, Vanderplas J, Passos A, Cournapeau D, Brucher M, Perrot M, Duchesnay E (2011) Scikit-learn: Machine learning in Python. J Mach Learn Res 12:2825\u20132830","journal-title":"J Mach Learn Res"},{"key":"471_CR51","unstructured":"Brown TA (2015) Confirmatory Factor Analysis for Applied Research. Guilford publications"},{"issue":"2","key":"471_CR52","doi-asserted-by":"publisher","first-page":"19","DOI":"10.1109\/34.908962","volume":"23","author":"Y-l Tian","year":"2001","unstructured":"Tian Y-l, Kanade T, Cohn JF (2001) Recognizing Action Units for Facial Expression Analysis. IEEE TRANSACTIONS ON PATTERN ANALYSIS AND MACHINE INTELLIGENCE 23(2):19. https:\/\/doi.org\/10.1109\/34.908962","journal-title":"IEEE TRANSACTIONS ON PATTERN ANALYSIS AND MACHINE INTELLIGENCE"},{"key":"471_CR53","doi-asserted-by":"publisher","unstructured":"Valstar M, Zafeiriou S, Pantic M. (2017) Facial Actions as Social Signals. In: Burgoon JK, Magnenat-Thalmann N, Pantic M, Vinciarelli A. (eds.) Social Signal Processing, pp. 123\u2013154. Cambridge University Press, Cambridge. 
https:\/\/doi.org\/10.1017\/9781316676202.011","DOI":"10.1017\/9781316676202.011"}],"container-title":["Journal on Multimodal User Interfaces"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s12193-025-00471-2.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s12193-025-00471-2","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s12193-025-00471-2.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2026,3,17]],"date-time":"2026-03-17T10:28:55Z","timestamp":1773743335000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s12193-025-00471-2"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,12,21]]},"references-count":53,"journal-issue":{"issue":"1","published-print":{"date-parts":[[2026,3]]}},"alternative-id":["471"],"URL":"https:\/\/doi.org\/10.1007\/s12193-025-00471-2","relation":{},"ISSN":["1783-7677","1783-8738"],"issn-type":[{"value":"1783-7677","type":"print"},{"value":"1783-8738","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025,12,21]]},"assertion":[{"value":"23 April 2025","order":1,"name":"received","label":"Received","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"3 December 2025","order":2,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"21 December 2025","order":3,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"The authors have no conflicts of interest to 
declare.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Competing interests"}},{"value":"This work makes use of a publicly available corpus and associated psychometrics and participant recordings. Ethical approval already has been sought for this work. No additional participant data was collected for this study.","order":3,"name":"Ethics","group":{"name":"EthicsHeading","label":"Ethical Approval"}}]}}