{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,27]],"date-time":"2026-03-27T12:19:35Z","timestamp":1774613975769,"version":"3.50.1"},"reference-count":65,"publisher":"Springer Science and Business Media LLC","issue":"3","license":[{"start":{"date-parts":[[2013,4,26]],"date-time":"2013-04-26T00:00:00Z","timestamp":1366934400000},"content-version":"tdm","delay-in-days":0,"URL":"http:\/\/www.springer.com\/tdm"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["J Multimodal User Interfaces"],"published-print":{"date-parts":[[2013,11]]},"DOI":"10.1007\/s12193-013-0122-3","type":"journal-article","created":{"date-parts":[[2013,5,1]],"date-time":"2013-05-01T12:53:22Z","timestamp":1367412802000},"page":"195-206","source":"Crossref","is-referenced-by-count":5,"title":["Induction, recording and recognition of natural emotions from facial expressions and speech prosody"],"prefix":"10.1007","volume":"7","author":[{"given":"Kostas","family":"Karpouzis","sequence":"first","affiliation":[]},{"given":"George","family":"Caridakis","sequence":"additional","affiliation":[]},{"given":"Roddy","family":"Cowie","sequence":"additional","affiliation":[]},{"given":"Ellen","family":"Douglas-Cowie","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2013,4,26]]},"reference":[{"key":"122_CR1","doi-asserted-by":"crossref","unstructured":"Abassi AR, Uno T, Dailey M, Afzulpurkar NV (2007) Towards knowledge-based affective interaction: situational interpretation of affect. In: Paiva A, Prada R, Picard R (eds) Affective computing and intelligent interaction. Springer LNCS, Lisbon, Berlin, pp 452\u2013463","DOI":"10.1007\/978-3-540-74889-2_40"},{"key":"122_CR2","unstructured":"Auberge V, Audibert N, Rilliard A (2004) E-Wiz: a trapper protocol for hunting the expressive speech corpora in lab. In Proceedings of 4th international conference on language resources and evaluation (LREC), pp 179\u2013182"},{"issue":"2","key":"122_CR3","doi-asserted-by":"crossref","first-page":"53","DOI":"10.1111\/1467-8721.00013","volume":"8","author":"JA Bachorowski","year":"1999","unstructured":"Bachorowski JA (1999) Vocal expression and perception of emotion. Curr Dir Psychol Sci 8(2):53\u201357","journal-title":"Curr Dir Psychol Sci"},{"key":"122_CR4","doi-asserted-by":"crossref","unstructured":"Batliner A, Fischer K, Huber R, Spilker J, Noeth E (2003) How to find trouble in communication. Speech Commun 40:117\u2013143","DOI":"10.1016\/S0167-6393(02)00079-1"},{"key":"122_CR5","doi-asserted-by":"crossref","first-page":"7","DOI":"10.1016\/0010-0277(94)90018-3","volume":"50","author":"A Bechara","year":"1994","unstructured":"Bechara A, Damasio A, Damasio H, Anderson S (1994) Insensitivity to future consequences following damage to human prefrontal cortex. Cognition 50:7\u201315","journal-title":"Cognition"},{"key":"122_CR6","unstructured":"Busso C, Narayanan S (2008) Recording audio-visual emotional databases from actors: a closer look. In: Proceedings of international conference on language resources and evaluation, workshop on emotion: corpora for research on emotion and affect, Marrakech, Morocco"},{"key":"122_CR7","doi-asserted-by":"crossref","unstructured":"Caridakis G, Karpouzis K, Wallace M, Kessous L, Amir N (2010) Multimodal user\u2019s affective state analysis in naturalistic interaction. J Multimodal User Interfaces 3(1\u20132):49\u201366 (Springer)","DOI":"10.1007\/s12193-009-0030-8"},{"key":"122_CR8","doi-asserted-by":"crossref","unstructured":"Caridakis G, Karpouzis K, Kollias S (2008) User and context adaptive neural networks for emotion recognition. Neurocomputing 71(13\u201315):2553\u20132562 (Elsevier)","DOI":"10.1016\/j.neucom.2007.11.043"},{"key":"122_CR9","doi-asserted-by":"crossref","unstructured":"Caridakis G, Malatesta L, Kessous L, Amir N, Raouzaiou A, Karpouzis K (2006) Modeling naturalistic affective states via facial and vocal expressions recognition. In: Proceedings of international conference on multimodal interfaces, Banff, Alberta","DOI":"10.1145\/1180995.1181029"},{"key":"122_CR10","unstructured":"Caridakis G, Raouzaiou A, Karpouzis K, Kollias S (2006) Synthesizing gesture expressivity based on real sequences. In: Proceedings of workshop on multimodal corpora: from multimodal behaviour theories to usable models, LREC, 2006 Conference. Genoa"},{"key":"122_CR11","doi-asserted-by":"crossref","unstructured":"Castellano G, Kessous L, Caridakis G (2008) Emotion recognition through multiple modalities: face, body gesture, speech. In: Peter C, Beale R (eds) Affect and emotion in human-computer interaction. LNCS, vol 4868. Springer, Heidelberg","DOI":"10.1007\/978-3-540-85099-1_8"},{"key":"122_CR12","doi-asserted-by":"crossref","unstructured":"Chen J, Wechsler H (2007) Human computer intelligent interaction using augmented cognition and emotional intelligence. LNCS Volume 4563\/2007. Springer, Berlin, pp 205\u2013214","DOI":"10.1007\/978-3-540-73335-5_23"},{"issue":"1","key":"122_CR13","doi-asserted-by":"crossref","first-page":"11","DOI":"10.1207\/s15327965pli1601_02","volume":"16","author":"R Cowie","year":"2005","unstructured":"Cowie R (2005) What are people doing when they assign everyday emotion terms? Psychol Inq 16(1):11\u201318","journal-title":"Psychol Inq"},{"key":"122_CR14","doi-asserted-by":"crossref","unstructured":"Cowie R, Cornelius R (2003) Describing the emotional states that are expressed in speech. Speech Commun 40:5\u201332 (Elsevier)","DOI":"10.1016\/S0167-6393(02)00071-7"},{"key":"122_CR15","unstructured":"Cowie R, Douglas-Cowie E, Apolloni B, Taylor J, Romano A, Fellenz W (1999) What a neural net needs to know about emotion words. In: Mastorakis N (ed) Computational intelligence and applications. World Scientific Engineering Society, pp 109\u2013114"},{"key":"122_CR16","doi-asserted-by":"crossref","unstructured":"Cowie R, Douglas-Cowie E, Karpouzis K, Caridakis G, Wallace M, Kollias S (2008) Recognition of emotional States in natural human-computer interaction. In: Tzovaras D (ed) Multimodal user interfaces. Springer Berlin, pp 119\u2013153","DOI":"10.1007\/978-3-540-78345-9_6"},{"key":"122_CR17","doi-asserted-by":"crossref","unstructured":"Cowie R, Douglas-Cowie E, Mckeown G, Gibney C The challenges of dealing with distributed signs of emotion: theory and empirical evidence. In: Proceedings ACII 2009 (published IEEE), vol 1 pp 351\u2013356","DOI":"10.1109\/ACII.2009.5349542"},{"key":"122_CR18","unstructured":"Cowie R, Douglas-Cowie E, Savvidou S, Mcmahon E, Sawey M, Schroeder M (2000) FEELTRACE: an instrument for recording perceived emotion in real time. In: Proceedings of ISCA workshop on speech and emotion, Northern Ireland, pp 19\u201324"},{"key":"122_CR19","doi-asserted-by":"crossref","unstructured":"Cowie R, Douglas-Cowie E, Tsapatsoulis N, Votsis G, Kollias S, Fellenz W, Taylor J (2001) Emotion recognition in human-computer interaction. IEEE Signal Process Mag 33\u201380","DOI":"10.1109\/79.911197"},{"key":"122_CR20","unstructured":"Cowie R, Mckeown G (2010) Statistical analysis of data from initial labelled database and recommendations for an economical coding scheme SEMAINE deliverable D6b Downloaded from http:\/\/www.semaine-project.eu\/ . 10\/11\/2010"},{"key":"122_CR21","doi-asserted-by":"crossref","unstructured":"Dahlbaeck N, Jonsson A, Ahrenberg L (1993) Wizard of Oz studies: why and how. In: Proceedings of 1st international conference on intelligent user interfaces, Orlando, Florida, pp 193\u2013200","DOI":"10.1145\/169891.169968"},{"key":"122_CR22","unstructured":"Devillers L, Cowie R, Martin J.-C, Douglas-Cowie E, Abrilian S, Mcrorie M (2006) Real life emotions in French and English TV video clips: an integrated annotation protocol combining continuous and discrete approaches. In: Proceedings of 5th international conference on language resources and evaluation, Genoa"},{"key":"122_CR23","unstructured":"Dhall, A, Goecke R, Lucey, S, Gedeon T (2011) Acted facial expressions in the wild database. Technical Report TR-CS-11-02, Australian National University"},{"key":"122_CR24","doi-asserted-by":"crossref","unstructured":"Douglas-Cowie E, Campbell N, Cowie R, Roach P (2003) Emotional speech: towards a new generation of databases. Speech Commun 40:33\u201360 (Elsevier)","DOI":"10.1016\/S0167-6393(02)00070-5"},{"key":"122_CR25","doi-asserted-by":"crossref","unstructured":"Douglas-Cowie E, Cowie R, Sneddon I, Cox C, Lowry O, Mcrorie M, Martin J-C, Devillers L, Abrilian S, Batliner A, Amir N, Karpouzis K (2007) The HUMAINE database: addressing the collection and annotation of naturalistic and induced emotional data. In: Proceedings of 2nd international conference on affective computing and intelligent interaction, Lisbon","DOI":"10.1007\/978-3-540-74889-2_43"},{"key":"122_CR26","doi-asserted-by":"crossref","unstructured":"Duric Z (2002) Integrating perceptual and cognitive modeling for adaptive and intelligent human-computer interaction. Proc IEEE 90(7):1272\u20131289","DOI":"10.1109\/JPROC.2002.801449"},{"issue":"4","key":"122_CR27","doi-asserted-by":"crossref","first-page":"384","DOI":"10.1037\/0003-066X.48.4.384","volume":"48","author":"P Ekman","year":"1993","unstructured":"Ekman P (1993) Facial expression and emotion. Am Psychol 48(4):384\u2013392","journal-title":"Am Psychol"},{"key":"122_CR28","doi-asserted-by":"crossref","unstructured":"Ekman P, Friesen W (1978) The facial action coding system: a technique for the measurement of facial movement. Consulting Psychologists Press, San Francisco","DOI":"10.1037\/t27734-000"},{"key":"122_CR29","doi-asserted-by":"crossref","first-page":"179","DOI":"10.1207\/s15516709cog1402_1","volume":"14","author":"JL Elman","year":"1990","unstructured":"Elman JL (1990) Finding structure in time. Cognit Sci 14:179\u2013211","journal-title":"Cognit Sci"},{"key":"122_CR30","unstructured":"Emotionally Rich Man-machine Intelligent System (Ermis) (2008) IST-2000-29319. http:\/\/www.image.ntua.gr\/ermis . Last retrieved 1 Sept 2008"},{"key":"122_CR31","doi-asserted-by":"crossref","unstructured":"Eyben F, Wollmer M, Schuller B (2009) openEAR\u2013introducing the Munich open-source emotion and affect recognition toolkit. In: Proceedings of ACII. Amsterdam, The Netherlands, pp 576\u2013581","DOI":"10.1109\/ACII.2009.5349350"},{"key":"122_CR32","doi-asserted-by":"crossref","unstructured":"Eyben F, Wollmer M, Graves A, Schuller B, Douglas-Cowie E, Cowie R (2010) On-line emotion recognition in a 3-D activation-valence-time continuum using acoustic and linguistic cues. J Multimodal User Interfaces 3:7\u201319 (Special Issue on Real-time Affect Analysis and Interpretation: Closing the Loop in Virtual Agents, Springer)","DOI":"10.1007\/s12193-009-0032-6"},{"key":"122_CR33","doi-asserted-by":"crossref","unstructured":"Fragopanagos N, Taylor J (2005) Emotion recognition in human-computer interaction. Neural Netw 18:389\u2013405 (Elsevier)","DOI":"10.1016\/j.neunet.2005.03.006"},{"key":"122_CR34","unstructured":"Frijda NH (1986) The emotions, studies in emotion and social interaction. Cambridge University Press, New York"},{"key":"122_CR35","unstructured":"Humaine Database. http:\/\/www.emotion-research.net\/download\/pilot-db . Last retrieved 1 September 2008"},{"key":"122_CR36","unstructured":"Humaine IST, Human-Machine Interaction Network on Emotion, 2004\u20132007. http:\/\/www.emotion-research.net . Last retrieved 1 Sept 2008"},{"key":"122_CR37","doi-asserted-by":"crossref","unstructured":"Ioannou S, Caridakis G, Karpouzis K, Kollias S (2007) Robust feature detection for facial expression recognition. EURASIP J Image Video Process (2)","DOI":"10.1155\/2007\/29081"},{"key":"122_CR38","doi-asserted-by":"crossref","unstructured":"Ioannou S, Raouzaiou A, Tzouvaras V, Mailis T, Karpouzis K, Kollias S (2005) Emotion recognition through facial expression analysis based on a neurofuzzy network. Neural Netw 18(4):423\u2013435. (Special Issue on Emotion: Understanding & Recognition, Elsevier)","DOI":"10.1016\/j.neunet.2005.03.004"},{"key":"122_CR39","doi-asserted-by":"crossref","unstructured":"Laptev I, Marszalek M, Schmid C, Rozenfeld B (2008) Learning realistic human actions from movies. In: IEEE Conference on computer vision and pattern recognition, 2008. CVPR 2008. IEEE, London, pp 1\u20138","DOI":"10.1109\/CVPR.2008.4587756"},{"key":"122_CR40","doi-asserted-by":"crossref","DOI":"10.1093\/oso\/9780195069945.001.0001","volume-title":"Emotion and adaptation","author":"RS Lazarus","year":"1991","unstructured":"Lazarus RS (1991) Emotion and adaptation. Oxford University Press, New York"},{"key":"122_CR41","doi-asserted-by":"crossref","unstructured":"Lazarus RS, Folkman S (1987) Transactional theory and research on emotions and coping. Eur J Pers 1(3):141\u2013169","DOI":"10.1002\/per.2410010304"},{"key":"122_CR42","unstructured":"Martin J-C, Devillers L, Zara A, Maffiolo V, Lechenadec G (2006) The EmoTABOU corpus. Humaine Summer School, Genova, Italy, September 22\u201328"},{"key":"122_CR43","unstructured":"Mcgilloway S, Cowie R, Douglas-Cowie E, Gielen S, Westerdijk M, Stroeve S (2000) Approaching automatic recognition of emotion from voice: a rough benchmark. In: Proceedings of the ISCA workshop on speech and emotion"},{"key":"122_CR44","doi-asserted-by":"crossref","unstructured":"Mertens P (2004) The prosogram: semi-automatic transcription of prosody based on a tonal perception model. In: Bel B, Marlien I (eds) Proceedings of Speech Prosody, Japan","DOI":"10.21437\/SpeechProsody.2004-127"},{"key":"122_CR45","doi-asserted-by":"crossref","unstructured":"Ortony A, Collins A, Clore GL (1988) The cognitive structure of emotions. Cambridge University Press, Cambridge","DOI":"10.1017\/CBO9780511571299"},{"key":"122_CR46","doi-asserted-by":"crossref","unstructured":"Pantic M, Rothkrantz L (2000) Automatic analysis of facial expressions: the state of the art. IEEE Trans. Pattern Anal Mach Intell 22(12):1424\u20131445","DOI":"10.1109\/34.895976"},{"key":"122_CR47","doi-asserted-by":"crossref","unstructured":"Pantic M, Sebe N, Cohn J, Huang T (2005) Affective multimodal human-computer interaction. In: Proceedings of the 13th annual ACM international conference on Multimedia, pp 669\u2013676","DOI":"10.1145\/1101149.1101299"},{"key":"122_CR48","doi-asserted-by":"crossref","DOI":"10.1037\/e526112012-054","volume-title":"Affective computing","author":"R Picard","year":"1997","unstructured":"Picard R (1997) Affective computing. MIT Press, Cambridge"},{"issue":"10","key":"122_CR49","doi-asserted-by":"crossref","first-page":"1021","DOI":"10.1155\/S1110865702206149","volume":"2002","author":"A Raouzaiou","year":"2002","unstructured":"Raouzaiou A, Tsapatsoulis N, Karpouzis K, Kollias S (2002) Parameterized facial expression synthesis based on MPEG-4. EURASIP J Appl Signal Process 2002(10):1021\u20131038","journal-title":"EURASIP J Appl Signal Process"},{"key":"122_CR50","doi-asserted-by":"crossref","first-page":"805","DOI":"10.1037\/0022-3514.76.5.805","volume":"76","author":"JA Russell","year":"1999","unstructured":"Russell JA, Feldman-Barrett L (1999) Core affect, prototypical emotional episodes, and other things called emotion: dissecting the elephant. J Pers Soc Psychol 76:805\u2013819","journal-title":"J Pers Soc Psychol"},{"key":"122_CR51","doi-asserted-by":"crossref","unstructured":"Schaefer A, Zimmermann HG (2007) Recurrent neural networks are universal approximators. Int J Neural Syst 17(4):253\u2013263","DOI":"10.1142\/S0129065707001111"},{"key":"122_CR52","first-page":"1","volume":"1","author":"KR Scherer","year":"1987","unstructured":"Scherer KR (1987) Toward a dynamic theory of emotion: the component process model of affective states. Geneva Stud Emot Commun 1:1\u201398","journal-title":"Geneva Stud Emot Commun"},{"key":"122_CR53","volume-title":"Handbook of pattern recognition and computer vision","author":"N Sebe","year":"2005","unstructured":"Sebe N, Cohen I, Huang TS (2005) Handbook of pattern recognition and computer vision. World Scientific, River Edge"},{"key":"122_CR54","unstructured":"Semaine IST, The sensitive agent project. http:\/\/www.semaine-project.eu . Last retrieved 1 Sept 2008"},{"key":"122_CR55","doi-asserted-by":"crossref","unstructured":"Valstar M, Gunes H, Pantic M (2007) How to distinguish posed from spontaneous smiles using geometric features. In: Massaro D, Takeda K, Roy D, Potamianos A (eds) Proceedings of the 9th international conference on multimodal interfaces, ICMI 2007, Nagoya, Aichi, Japan, November 12\u201315, pp 38\u201345","DOI":"10.1145\/1322192.1322202"},{"key":"122_CR56","unstructured":"Valstar M, Pantic M, Ambadar Z, Cohn J (2006) Spontaneous vs. posed facial behavior: automatic analysis of brow actions. In: Proceedings of the 8th international conference on multimodal interfaces, ACM, New York, pp 162\u2013170"},{"key":"122_CR57","doi-asserted-by":"crossref","unstructured":"van Reekum C, Johnstone T, Banse R, Etter A, Wehrle T, Scherer K (2004) Psychophysiological responses to appraisal dimensions in a computer game. Cognit Emot 18(663\u2013688)","DOI":"10.1080\/02699930341000167"},{"key":"122_CR58","first-page":"72","volume":"35","author":"E Velten","year":"1998","unstructured":"Velten E (1998) A laboratory task for induction of mood states. Behav Res Therapy 35:72\u201382","journal-title":"Behav Res Therapy"},{"key":"122_CR59","doi-asserted-by":"crossref","unstructured":"Wallace M, Ioannou S, Raouzaiou A, Karpouzis K, Kollias S (2006) Dealing with feature uncertainty in facial expression recognition using possibilistic fuzzy rule evaluation. Int J Intell Syst Technol Appl 1(3\u20134)","DOI":"10.1504\/IJISTA.2006.009916"},{"key":"122_CR60","doi-asserted-by":"crossref","unstructured":"Wang N, Marsella S (2006) Evg: an emotion evoking game. In: Proceedings of 6th international conference on intelligent virtual agents. Springer LNCS, pp 282\u2013291","DOI":"10.1007\/11821830_23"},{"key":"122_CR61","doi-asserted-by":"crossref","unstructured":"Weizenbaum J (1966) ELIZA\u2014a computer program for the study of natural language communication between man and machine. Commun ACM 9(1):35\u201336","DOI":"10.1145\/365153.365168"},{"key":"122_CR62","unstructured":"Whissel CM (1989) The dictionary of affect in language. In: Plutchnik R, Kellerman H (eds) Emotion: theory, research and experience: vol 4, the measurement of emotions. Academic Press, New York"},{"key":"122_CR63","unstructured":"Young JW (1993) Head and face anthropometry of adult U.S. civilians. FAA Civil Aeromedical Institute, 1963\u20131993"},{"key":"122_CR64","doi-asserted-by":"crossref","unstructured":"Zeng Z, Pantic M, Roisman G, Huang TS (2007) A survey of affect recognition methods: audio, visual and spontaneous expressions. In: Proceedings of the 9th international conference on multimodal interfaces. ACM, New York, pp 126\u2013133","DOI":"10.1145\/1322192.1322216"},{"key":"122_CR65","doi-asserted-by":"crossref","unstructured":"Zeng Z, Pantic M, Roisman G, Huang T (2009) A survey of affect recognition methods: audio, visual, and spontaneous expressions. IEEE Trans Pattern Anal Mach Intell 31(1):39\u201358","DOI":"10.1109\/TPAMI.2008.52"}],"container-title":["Journal on Multimodal User Interfaces"],"original-title":[],"language":"en","link":[{"URL":"http:\/\/link.springer.com\/content\/pdf\/10.1007\/s12193-013-0122-3.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"http:\/\/link.springer.com\/article\/10.1007\/s12193-013-0122-3\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"http:\/\/link.springer.com\/content\/pdf\/10.1007\/s12193-013-0122-3","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,5,9]],"date-time":"2024-05-09T22:40:45Z","timestamp":1715294445000},"score":1,"resource":{"primary":{"URL":"http:\/\/link.springer.com\/10.1007\/s12193-013-0122-3"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2013,4,26]]},"references-count":65,"journal-issue":{"issue":"3","published-print":{"date-parts":[[2013,11]]}},"alternative-id":["122"],"URL":"https:\/\/doi.org\/10.1007\/s12193-013-0122-3","relation":{},"ISSN":["1783-7677","1783-8738"],"issn-type":[{"value":"1783-7677","type":"print"},{"value":"1783-8738","type":"electronic"}],"subject":[],"published":{"date-parts":[[2013,4,26]]}}}