{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,8]],"date-time":"2026-03-08T02:33:13Z","timestamp":1772937193425,"version":"3.50.1"},"reference-count":93,"publisher":"Springer Science and Business Media LLC","issue":"6","license":[{"start":{"date-parts":[[2020,9,29]],"date-time":"2020-09-29T00:00:00Z","timestamp":1601337600000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springer.com\/tdm"},{"start":{"date-parts":[[2020,9,29]],"date-time":"2020-09-29T00:00:00Z","timestamp":1601337600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springer.com\/tdm"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["SN COMPUT. SCI."],"published-print":{"date-parts":[[2020,11]]},"DOI":"10.1007\/s42979-020-00263-3","type":"journal-article","created":{"date-parts":[[2020,9,29]],"date-time":"2020-09-29T17:52:38Z","timestamp":1601401958000},"update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":10,"title":["On Designing Expressive Robot Behavior: The Effect of Affective Cues on Interaction"],"prefix":"10.1007","volume":"1","author":[{"given":"Amir","family":"Aly","sequence":"first","affiliation":[]},{"given":"Adriana","family":"Tapus","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2020,9,29]]},"reference":[{"key":"263_CR1","unstructured":"Aly A. Towards an interactive human-robot relationship: developing a customized robot behavior to human profile. PhD thesis, ENSTA ParisTech, France; 2015."},{"key":"263_CR2","doi-asserted-by":"crossref","unstructured":"Aly A, Tapus A. Towards an online voice-based gender and internal state detection model. In: Proceedings of the 6th ACM\/IEEE human-robot interaction conference (HRI), Switzerland; 2011.","DOI":"10.1145\/1957656.1957683"},{"key":"263_CR3","doi-asserted-by":"publisher","first-page":"183","DOI":"10.1007\/978-3-642-27449-7_14","volume-title":"Service orientation in holonic and multi-agent manufacturing control: studies in computational intelligence","author":"A Aly","year":"2012","unstructured":"Aly A, Tapus A. A model for mapping speech to head gestures in human-robot interaction. In: Borangiu T, Thomas A, Trentesaux D, editors. Service orientation in holonic and multi-agent manufacturing control: studies in computational intelligence. Heidelberg: Springer; 2012. p. 183\u201396."},{"key":"263_CR4","doi-asserted-by":"crossref","unstructured":"Aly A, Tapus A. Prosody-driven robot arm gestures generation in human-robot interaction. In: Proceedings of the 7th ACM\/IEEE human-robot interaction conference (HRI), Massachusetts; 2012.","DOI":"10.1145\/2157689.2157783"},{"key":"263_CR5","doi-asserted-by":"crossref","unstructured":"Aly A, Tapus A. Prosody-based adaptive metaphoric head and arm gestures synthesis in human robot interaction. In: Proceedings of the 16th IEEE international conference on advanced robotics (ICAR), Montevideo; 2013. pp 1\u20138.","DOI":"10.1109\/ICAR.2013.6766507"},{"key":"263_CR6","unstructured":"Aly A, Tapus A. Towards enhancing human-robot relationship: customized robot\u2019s behavior to human\u2019s profile. In: Proceedings of the AAAI fall symposium on AI for human-robot interaction (AI-HRI), Virginia; 2014."},{"key":"263_CR7","doi-asserted-by":"crossref","unstructured":"Aly A, Tapus A. Multimodal adapted robot behavior synthesis within a narrative human-robot interaction. In: Proceedings of the IEEE\/RSJ international conference on intelligent robots and systems (IROS), Hamburg; 2015. p. 2986\u201393.","DOI":"10.1109\/IROS.2015.7353789"},{"key":"263_CR8","series-title":"Springer tracts in advanced robotics (STAR)","doi-asserted-by":"publisher","first-page":"185","DOI":"10.1007\/978-3-319-12922-8_7","volume-title":"Intelligent assistive robots: recent advances in assistive robotics for everyday activities","author":"A Aly","year":"2015","unstructured":"Aly A, Tapus A. An online fuzzy-based approach for human emotions detection: an overview on the human cognitive model of understanding and generating multimodal actions. In: Mohammed S, Moreno J, Kong K, Amirat Y, editors. Intelligent assistive robots: recent advances in assistive robotics for everyday activities. Springer tracts in advanced robotics (STAR), vol. 106. Switzerland: Springer International Publishing; 2015. p. 185\u2013212."},{"issue":"2","key":"263_CR9","doi-asserted-by":"publisher","first-page":"193","DOI":"10.1007\/s10514-015-9444-1","volume":"40","author":"A Aly","year":"2016","unstructured":"Aly A, Tapus A. Towards an intelligent system for generating an adapted verbal and nonverbal combined behavior in human-robot interaction. Auton Robots. 2016;40(2):193\u2013209.","journal-title":"Auton Robots"},{"key":"263_CR10","unstructured":"Aly A, Taniguchi T, Mochihashi D. A Bayesian approach to phrase understanding through cross-situational learning. In: International workshop on visually grounded interaction and language (ViGIL), in conjunction with the 32nd conference on neural information processing systems (NeurIPS), Montreal; 2018."},{"key":"263_CR11","doi-asserted-by":"crossref","unstructured":"Aly A, Taniguchi T, Mochihashi D. A probabilistic approach to unsupervised induction of combinatory categorial grammar in situated human-robot interaction. In: Proceedings of the 18th IEEE-RAS international conference on humanoid robots (Humanoids), Beijing; 2018. p. 1\u20139.","DOI":"10.1109\/HUMANOIDS.2018.8625009"},{"issue":"17","key":"263_CR12","first-page":"253","volume":"4","author":"M Atta","year":"2013","unstructured":"Atta M, Ather M, Bano M. Emotional intelligence and personality traits among university teachers: relationship and gender differences. Int J Bus Soc Sci. 2013;4(17):253\u20139.","journal-title":"Int J Bus Soc Sci"},{"key":"263_CR13","unstructured":"Beira R, Lopes M, Praga M, Santos-Victor J, Bernardino A, Metta G, Becchi F, Saltaren R. Design of the robot-cub (iCub) head. In: Proceedings of the IEEE international conference on robotics and automation (ICRA), USA; 2006. p. 94\u2013100."},{"issue":"6","key":"263_CR14","doi-asserted-by":"publisher","first-page":"961","DOI":"10.1080\/02699931.2012.751899","volume":"27","author":"MT Boden","year":"2013","unstructured":"Boden MT, Thompson RJ, Diz\u00e9n M, Berenbaum H, Baker JP. Are emotional clarity and emotion differentiation related? Cogn Emot. 2013;27(6):961\u201378.","journal-title":"Cogn Emot"},{"key":"263_CR15","doi-asserted-by":"publisher","first-page":"167","DOI":"10.1016\/S0921-8890(02)00373-1","volume":"42","author":"C Breazeal","year":"2003","unstructured":"Breazeal C. Towards sociable robots. Robot Auton Syst. 2003;42:167\u201375.","journal-title":"Robot Auton Syst"},{"key":"263_CR16","unstructured":"Breemen AV, Yan X, Meerbeek B. iCat: an animated user-interface robot with personality. In: Proceedings of the 4th international conference on autonomous agents and multiagent systems (AAMAS), Utrecht; 2005."},{"key":"263_CR17","doi-asserted-by":"crossref","unstructured":"Busso C, Deng Z, Yildirim S, Bulut M, Lee C, Kazemzadeh A, Lee S, Neumann U, Narayanan S. Analysis of emotion recognition using facial expressions, speech, and multimodal information. In: Proceedings of the 6th international conference on multimodal interfaces (ICMI), New York; 2004. p. 205\u201311.","DOI":"10.1145\/1027933.1027968"},{"key":"263_CR18","volume-title":"Artificial intelligence and innovations 2007: from theory to applications (AIAI 2007)","author":"G Caridakis","year":"2007","unstructured":"Caridakis G, Castellano G, Kessous L, Raouzaiou A, Malatesta L, Asteriadis S, Karpouzis K. Multimodal emotion recognition from expressive faces, body gestures and speech. In: Boukis C, Pnevmatikakis A, Polymenakos L, editors. Artificial intelligence and innovations 2007: from theory to applications (AIAI 2007), vol. 247. Boston: Springer; 2007."},{"key":"263_CR19","doi-asserted-by":"publisher","first-page":"29","DOI":"10.7551\/mitpress\/2697.001.0001","volume-title":"Embodied conversational agents","author":"J Cassell","year":"2000","unstructured":"Cassell J, Bickmore T, Campbell L, Vilhj\u00e1lmsson H, Yan H. Human conversation as a system framework: designing embodied conversational agents. In: Cassell J, Sullivan J, Prevost S, Churchill E, editors. Embodied conversational agents. Cambridge: MIT Press; 2000, p. 29\u201363."},{"key":"263_CR20","doi-asserted-by":"crossref","unstructured":"Cassell J, Vilhj\u00e1lmsson HH, Bickmore T. BEAT: The behavior expression animation toolkit. In: Proceedings of the SIGGRAPH; 2001. pp.477\u201386.","DOI":"10.1145\/383259.383315"},{"key":"263_CR21","series-title":"Lecture notes in computer science","volume-title":"Affect and emotion in human computer interaction","author":"G Castellano","year":"2007","unstructured":"Castellano G, Kessous L, Caridakis G. Emotion recognition through multiple modalities: Face, body gesture, speech. In: Peter C, Beale R, editors. Affect and emotion in human computer interaction. Lecture notes in computer science, vol. 4868, Heidelberg: Springer; 2007."},{"key":"263_CR22","doi-asserted-by":"crossref","unstructured":"Chiu CC, Morency LP, Marsella S. Predicting co-verbal gestures: A deep and temporal modeling approach. In: Proceedings of the ACM international conference on intelligent virtual agents (IVA); 2015. p. 152\u201366.","DOI":"10.1007\/978-3-319-21996-7_17"},{"key":"263_CR23","doi-asserted-by":"crossref","unstructured":"Clavel C, Plessier J, Martin JC, Ach L, Morel B. Combining facial and postural expressions of emotions in a virtual character. In: Proceedings of the 9th international conference on intelligent virtual agents (IVA); 2009. p. 287\u2013300.","DOI":"10.1007\/978-3-642-04380-2_31"},{"issue":"4","key":"263_CR24","doi-asserted-by":"publisher","first-page":"671","DOI":"10.1080\/02699930302304","volume":"17","author":"E Coffey","year":"2003","unstructured":"Coffey E, Berenbaum H, Kerns JG. The dimensions of emotional intelligence, alexithymia, and mood awareness: Associations with personality and performance on an emotional stroop task. Cogn Emot. 2003;17(4):671\u20139.","journal-title":"Cogn Emot"},{"key":"263_CR25","series-title":"Lecture notes in computer science","doi-asserted-by":"publisher","first-page":"542","DOI":"10.1007\/978-3-319-02675-6_54","volume-title":"Social robotics (ICSR)","author":"S Costa","year":"2013","unstructured":"Costa S, Soares F, Santos C. Facial expressions and gestures to convey emotions with a humanoid robot. In: Herrmann G, Pearson MJ, Lenz A, Bremner P, Spiers A, Leonards U, editors. Social robotics (ICSR). Lecture notes in computer science, vol. 8239. Berlin: Springer; 2013. p. 542\u201351."},{"key":"263_CR26","doi-asserted-by":"publisher","first-page":"5","DOI":"10.1016\/S0167-6393(02)00071-7","volume":"40","author":"R Cowie","year":"2003","unstructured":"Cowie R, Cornelius R. Describing the emotional states that are expressed in speech. Speech Commun. 2003;40:5\u201332.","journal-title":"Speech Commun"},{"key":"263_CR27","doi-asserted-by":"crossref","unstructured":"Edgington M. Investigating the limitations of concatenative synthesis. In: Proceedings of Eurospeech, Greece 1997.","DOI":"10.21437\/Eurospeech.1997-217"},{"key":"263_CR28","first-page":"169","volume-title":"Human ethology: claims and limits of a new discipline: contributions to the colloquium","author":"P Ekman","year":"1979","unstructured":"Ekman P. About brows: emotional and conversational signal. In: von Cranach M, Foppa K, Lepenies W, Ploog D, editors. Human ethology: claims and limits of a new discipline: contributions to the colloquium. Cambridge: Cambridge University Press; 1979. p. 169\u2013248."},{"key":"263_CR29","doi-asserted-by":"publisher","first-page":"49","DOI":"10.1515\/semi.1969.1.1.49","volume":"1","author":"P Ekman","year":"1969","unstructured":"Ekman P, Friesen WV. The repertoire of nonverbal behavior: categories, origins, usage, and coding. Semiotica. 1969;1:49\u201398.","journal-title":"Semiotica"},{"key":"263_CR30","volume-title":"Facial action coding system: a technique for the measurement of facial movement","author":"P Ekman","year":"1978","unstructured":"Ekman P, Friesen WV. Facial action coding system: a technique for the measurement of facial movement. Palo Alto: Consulting Psychologists Press; 1978."},{"key":"263_CR31","unstructured":"Gibiansky A, Arik S, Diamos G, Miller J, Peng K, Ping W, Raiman J, Zhou Y. Deep voice 2: multi-speaker neural text-to-speech. In: Proceedings of the international conference on neural information processing systems (NIPS), Long Beach; 2017. p. 2962\u201370."},{"key":"263_CR32","doi-asserted-by":"publisher","first-page":"1216","DOI":"10.1037\/0022-3514.59.6.1216","volume":"59","author":"LR Goldberg","year":"1990","unstructured":"Goldberg LR. An alternative description of personality: the big-five factor structure. Personal Soc Psychol. 1990;59:1216\u20131229.","journal-title":"Personal Soc Psychol"},{"issue":"1771","key":"263_CR33","doi-asserted-by":"publisher","first-page":"20180026","DOI":"10.1098\/rstb.2018.0026","volume":"374","author":"H Gunes","year":"2019","unstructured":"Gunes H, Celiktutan O, Sariyanidi E. Live human-robot interactive public demonstrations with automatic emotion and personality prediction. Philos Trans R Soc B. 2019;374(1771):20180026.","journal-title":"Philos Trans R Soc B"},{"key":"263_CR34","doi-asserted-by":"crossref","unstructured":"Hasegawa D, Kaneko N, Shirakawa S, Sakuta H, Sumi K. Evaluation of speech-to-gesture generation using bi-directional LSTM network. In: Proceedings of the ACM international conference on intelligent virtual agents (IVA), Sydney; 2018.","DOI":"10.1145\/3267851.3267878"},{"issue":"2","key":"263_CR35","doi-asserted-by":"publisher","first-page":"143","DOI":"10.1080\/02699930125768","volume":"15","author":"D Hermans","year":"2001","unstructured":"Hermans D, Houwer JD, Eelen P. A time course analysis of the affective priming effect. Cogn Emot. 2001;15(2):143\u201365.","journal-title":"Cogn Emot"},{"issue":"7","key":"263_CR36","doi-asserted-by":"publisher","first-page":"1095","DOI":"10.1080\/02699930541000084","volume":"19","author":"J Hewig","year":"2005","unstructured":"Hewig J, Hagemann D, Seifert J, Gollwitzer M, Naumann E, Bartussek D. A revised film set for the induction of basic emotions. Cogn Emot. 2005;19(7):1095\u2013109.","journal-title":"Cogn Emot"},{"key":"263_CR37","doi-asserted-by":"crossref","unstructured":"Hoffman G, Zuckerman O, Hirschberger G, Luria M, Shani-Sherman T. Design and evaluation of a peripheral robotic conversation companion. In: Proceedings of the 10th ACM\/IEEE international conference on human-robot interaction (HRI), Portland; 2015.","DOI":"10.1145\/2696454.2696495"},{"issue":"4","key":"263_CR38","doi-asserted-by":"publisher","first-page":"852","DOI":"10.1109\/TCDS.2018.2826921","volume":"10","author":"R Hortensius","year":"2018","unstructured":"Hortensius R, Hekele F, Cross ES. The perception of emotion in artificial agents. IEEE Trans Cogn Dev Syst. 2018;10(4):852\u201364.","journal-title":"IEEE Trans Cogn Dev Syst"},{"issue":"1","key":"263_CR39","doi-asserted-by":"publisher","first-page":"71","DOI":"10.1093\/scan\/nsm040","volume":"3","author":"CA Hutcherson","year":"2008","unstructured":"Hutcherson CA, Goldin PR, Ramel W, McRae K, Gross JJ. Attention and emotion influence the relationship between extraversion and neural response. Soc Cogn Affect Neurosci. 2008;3(1):71\u20139.","journal-title":"Soc Cogn Affect Neurosci"},{"issue":"4","key":"263_CR40","doi-asserted-by":"publisher","first-page":"379","DOI":"10.1023\/A:1025761017833","volume":"6","author":"A Iida","year":"2003","unstructured":"Iida A, Campbell N. Speech database design for a concatenative text-to-speech synthesis system for individuals with communication disorders. Speech Technol. 2003;6(4):379\u201392.","journal-title":"Speech Technol"},{"key":"263_CR41","doi-asserted-by":"publisher","first-page":"189","DOI":"10.1007\/978-4-431-68147-2_12","volume-title":"Modeling in computer graphics","author":"P Kalra","year":"1991","unstructured":"Kalra P, Mangili A, Magnenat-Thalmann N, Thalmann D. SMILE: a multilayered facial animation system. In: Kunii T, editor. Modeling in computer graphics. Berlin: Springer-Verlag; 1991. p. 189\u201398."},{"issue":"4","key":"263_CR42","doi-asserted-by":"publisher","first-page":"687","DOI":"10.1111\/j.0022-3506.2004.00277.x","volume":"72","author":"SM Kang","year":"2004","unstructured":"Kang SM, Shaver PR. Individual differences in emotional complexity: their psychological implications. Personality. 2004;72(4):687\u2013726.","journal-title":"Personality"},{"issue":"4","key":"263_CR43","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1145\/3072959.3073658","volume":"36","author":"T Karras","year":"2017","unstructured":"Karras T, Aila T, Laine S, Herva A, Lehtinen J. Audio-driven facial animation by joint end-to-end learning of pose and emotion. ACM Trans Graph. 2017;36(4):1\u201312.","journal-title":"ACM Trans Graph."},{"issue":"1","key":"263_CR44","doi-asserted-by":"publisher","first-page":"10","DOI":"10.1177\/0963721414550708","volume":"24","author":"TB Kashdan","year":"2015","unstructured":"Kashdan TB, Barrett LF, McKnight PE. Unpacking emotion differentiation: transforming unpleasant experience by perceiving distinctions in negativity. Curr Dir Psychol Sci. 2015;24(1):10\u20136.","journal-title":"Curr Dir Psychol Sci"},{"key":"263_CR45","doi-asserted-by":"publisher","first-page":"153","DOI":"10.1007\/978-1-4615-9328-7_15","volume-title":"Semiotics 1981","author":"A Kendon","year":"1983","unstructured":"Kendon A. The study of gesture: some remarks on its history. In: Deely J, Lenhart M, editors. Semiotics 1981. Berlin: Springer-Verlag; 1983. p. 153\u201364."},{"key":"263_CR46","first-page":"131","volume-title":"Cross cultural perspectives in non-verbal communication","author":"A Kendon","year":"1988","unstructured":"Kendon A. How gestures can become like words. In: Poyatos F, editor. Cross cultural perspectives in non-verbal communication. Toronto: Hogrefe; 1988. p. 131\u201341."},{"key":"263_CR47","doi-asserted-by":"publisher","first-page":"414","DOI":"10.1016\/j.psychres.2017.12.068","volume":"261","author":"H Kim","year":"2018","unstructured":"Kim H, Lu X, Costa M, Kandemir B, Adams RB, Li J, Wang JZ, Newman MG. Development and validation of image stimuli for emotion elicitation (ISEE): a novel affective pictorial system with test-retest repeatability. Psychiatry Res. 2018;261:414\u201320.","journal-title":"Psychiatry Res."},{"issue":"1","key":"263_CR48","doi-asserted-by":"publisher","first-page":"39","DOI":"10.1002\/cav.6","volume":"15","author":"S Kopp","year":"2004","unstructured":"Kopp S, Wachsmuth I. Synthesizing multimodal utterances for conversational agents. Comput Animat Virtual Worlds. 2004;15(1):39\u201352.","journal-title":"Comput Animat Virtual Worlds"},{"key":"263_CR49","doi-asserted-by":"crossref","unstructured":"Kucherenko T, Hasegawa D, Henter GE, Kaneko N, Kjellstrom H. Analyzing input and output representations for speech-driven gesture generation. In: Proceedings of the ACM international conference on intelligent virtual agents (IVA), Paris; 2019.","DOI":"10.1145\/3308532.3329472"},{"key":"263_CR50","unstructured":"Le QA, Huang J, Pelachaud C. A common gesture and speech production framework for virtual and physical agents. In: Proceedings of the 14th ACM international conference on multimodal interaction (ICMI), California; 2012."},{"key":"263_CR51","unstructured":"Lee Y, Rabiee A, Lee SY. Emotional end-to-end neural speech synthesizer. In: Proceedings of the international conference on neural information processing systems (NIPS), Long Beach; 2017."},{"key":"263_CR52","doi-asserted-by":"crossref","unstructured":"Lutkebohle I, Hegel F, Schulz S, Hackel M, Wrede B, Wachsmuth S, Sagerer G. The Bielefeld anthropomorphic robot head Flobi. In: Proceedings of the IEEE international conference on robotics and automation (ICRA), Alaska; 2010. p 3384\u201391.","DOI":"10.1109\/ROBOT.2010.5509173"},{"key":"263_CR53","first-page":"3","volume-title":"Emotional development and emotional intelligence: educational Implications","author":"JD Mayer","year":"1997","unstructured":"Mayer JD, Salovey P. What is emotional intelligence? In: Salovey P, Sluyter D, editors. Emotional development and emotional intelligence: educational implications. New York: Basic Books; 1997. p. 3\u201334."},{"key":"263_CR54","doi-asserted-by":"publisher","first-page":"507","DOI":"10.1146\/annurev.psych.59.103006.093646","volume":"59","author":"JD Mayer","year":"2008","unstructured":"Mayer JD, Roberts RD, Barsade SG. Human abilities: emotional intelligence. Annu Rev Psychol. 2008;59:507\u201336.","journal-title":"Annu Rev Psychol"},{"key":"263_CR55","doi-asserted-by":"publisher","first-page":"261","DOI":"10.1007\/s12369-013-0226-7","volume":"6","author":"D McColl","year":"2014","unstructured":"McColl D, Nejat G. Recognizing emotional body language displayed by a human-like social robot. Int J Soc Robot. 2014;6:261\u201380.","journal-title":"Int J Soc Robot"},{"key":"263_CR56","volume-title":"Hand and mind: What gestures reveal about thought","author":"D McNeill","year":"1992","unstructured":"McNeill D. Hand and mind: what gestures reveal about thought. Chicago: University of Chicago Press; 1992."},{"key":"263_CR57","doi-asserted-by":"crossref","unstructured":"Mozziconacci, S. Prosody and emotions. In: Proceedings of the international conference on speech prosody, Aix-en-Provence; 2002. p. 1\u20139.","DOI":"10.21437\/SpeechProsody.2002-1"},{"issue":"4","key":"263_CR58","doi-asserted-by":"publisher","first-page":"369","DOI":"10.1016\/0167-6393(95)00005-9","volume":"16","author":"IR Murray","year":"1995","unstructured":"Murray IR, Arnott JL. Implementation and testing of a system for producing emotion-by-rule in synthetic speech. Speech Commun. 1995;16(4):369\u201390.","journal-title":"Speech Commun"},{"issue":"1","key":"263_CR59","first-page":"157","volume":"59","author":"PY Oudeyer","year":"2003","unstructured":"Oudeyer PY. The production and recognition of emotions in speech: features and algorithms. Hum-Comput Stud. 2003;59(1):157\u201383.","journal-title":"Hum-Comput Stud"},{"key":"263_CR60","doi-asserted-by":"crossref","unstructured":"Park E, Kim KJ, del Pobil AP. The effects of robot\u2019s body gesture and gender in human-robot interaction. In: Proceedings of the 15th international conference on internet and multimedia systems and applications, Washington DC; 2011.","DOI":"10.2316\/P.2011.747-023"},{"key":"263_CR61","doi-asserted-by":"crossref","unstructured":"Pelachaud C. Multimodal expressive embodied conversational agents. In: Proceedings of the 13th annual ACM international conference on multimedia, New York; 2005. p. 683\u20139.","DOI":"10.1145\/1101149.1101301"},{"issue":"11","key":"263_CR62","doi-asserted-by":"publisher","first-page":"e27256","DOI":"10.1371\/journal.pone.0027256","volume":"6","author":"MD Pell","year":"2011","unstructured":"Pell MD, Kotz SA. On the time course of vocal emotion recognition. PLoS ONE. 2011;6(11):e27256.","journal-title":"PLoS ONE."},{"key":"263_CR63","doi-asserted-by":"publisher","first-page":"906","DOI":"10.1016\/j.paid.2010.02.019","volume":"48","author":"KV Petrides","year":"2010","unstructured":"Petrides KV, Vernon PA, Schermer JA, Ligthart L, Boomsma DI, Veselka L. Relationships between trait emotional intelligence and the Big Five in the Netherlands. Personal Individ Differ. 2010;48:906\u201310.","journal-title":"Personal Individ Differ"},{"issue":"1\u20132","key":"263_CR64","doi-asserted-by":"publisher","first-page":"55","DOI":"10.1016\/S1071-5819(03)00052-1","volume":"59","author":"RW Picard","year":"2003","unstructured":"Picard RW. Affective computing: challenges. Int J Hum Comput Stud. 2003;59(1\u20132):55\u201364.","journal-title":"Int J Hum Comput Stud"},{"key":"263_CR65","doi-asserted-by":"publisher","first-page":"245","DOI":"10.1145\/965161.806812","volume":"15","author":"SM Platt","year":"1981","unstructured":"Platt SM, Badler N. Animating facial expressions. Comput Graph. 1981;15:245\u201352.","journal-title":"Comput Graph"},{"key":"263_CR66","doi-asserted-by":"crossref","unstructured":"Qian Y, Fan Y, Hu W, Soong FK. On the training aspects of deep neural network DNN for parametric TTS synthesis. In: Proceedings of the IEEE international conference on acoustics, speech and signal processing (ICASSP); 2014. p. 3829\u201333.","DOI":"10.1109\/ICASSP.2014.6854318"},{"key":"263_CR67","doi-asserted-by":"publisher","first-page":"54","DOI":"10.1017\/CBO9780511596544.007","volume-title":"The cambridge handbook of personality-psychology","author":"R Reisenzein","year":"2009","unstructured":"Reisenzein R, Weber H. Personality and emotion. In: Corr P, Matthews G, editors. The Cambridge handbook of personality-psychology. Cambridge: Cambridge University Press; 2009. p. 54\u201371."},{"key":"263_CR68","volume-title":"The Oxford companion to emotion and the affective sciences","author":"W Revelle","year":"2010","unstructured":"Revelle W, Scherer KR. Personality and emotion. In: Sander D, Scherer K, editors. The Oxford companion to emotion and the affective sciences. Oxford: Oxford University Press; 2010."},{"key":"263_CR69","doi-asserted-by":"crossref","unstructured":"Ribeiro FS, Santos FH, Albuquerque PB, Oliveira-Silva P. Emotional induction through music: measuring cardiac and electrodermal responses of emotional states and their persistence. Front Psychol; 2019.","DOI":"10.3389\/fpsyg.2019.00451"},{"key":"263_CR70","series-title":"Series in affective science","volume-title":"Handbook of emotion elicitation and assessment","author":"NA Roberts","year":"2007","unstructured":"Roberts NA, Tsai JL, Coan JA. Emotion elicitation using dyadic interaction tasks. In: Coan JA, Allen JJB, editors. Handbook of emotion elicitation and assessment. Series in affective science. Oxford: Oxford University Press; 2007."},{"key":"263_CR71","doi-asserted-by":"crossref","unstructured":"Salem M, Rohlfing K, Kopp S, Joublin F. A friendly gesture: investigating the effect of multimodal robot behavior in human-robot interaction. In: Proceedings of the 20th IEEE international symposium on robot and human interaction communciation (RO-MAN); 2011. p. 247\u201352.","DOI":"10.1109\/ROMAN.2011.6005285"},{"issue":"11","key":"263_CR72","doi-asserted-by":"publisher","first-page":"2251","DOI":"10.1080\/17470211003721642","volume":"63","author":"DA Sauter","year":"2010","unstructured":"Sauter DA, Eisner F, Calder AJ, Scott SK. Perceptual cues in nonverbal vocal expressions of emotion. Q J Exp Psychol. 2010;63(11):2251\u201372.","journal-title":"Q J Exp Psychol"},{"issue":"3","key":"263_CR73","doi-asserted-by":"publisher","first-page":"216","DOI":"10.1016\/j.tics.2017.01.001","volume":"21","author":"A Schirmer","year":"2017","unstructured":"Schirmer A, Adolphs R. Emotion perception from face, voice, and touch: comparisons and convergence. Trends Cogn Sci. 2017;21(3):216\u201328.","journal-title":"Trends Cogn Sci"},{"key":"263_CR74","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1177\/2158244016630591","volume":"6","author":"E Schreuder","year":"2016","unstructured":"Schreuder E, Erp JV, Toet A, Kallen VL. Emotional responses to multisensory environmental stimuli: a conceptual framework and literature review. SAGE Open. 2016;6:1\u201319.","journal-title":"SAGE Open"},{"issue":"4","key":"263_CR75","doi-asserted-by":"publisher","first-page":"365","DOI":"10.1023\/A:1025708916924","volume":"6","author":"M Schroder","year":"2003","unstructured":"Schroder M, Trouvain J. The German text-to-speech synthesis system Mary: a tool for research, development, and teaching. Speech Technol. 2003;6(4):365\u201377.","journal-title":"Speech Technol"},{"key":"263_CR76","doi-asserted-by":"publisher","first-page":"1454","DOI":"10.1073\/pnas.1322355111","volume":"111","author":"D Shichuan","year":"2014","unstructured":"Shichuan D, Yong T, Martinez AM. Compound facial expressions of emotion. Proceedings of the National Academy of Sciences of the United States of America (PNAS). 2014;111:1454\u201362.","journal-title":"Proc Natl Acad Sci USA"},{"issue":"2","key":"263_CR77","doi-asserted-by":"publisher","first-page":"147","DOI":"10.1080\/15298860600586206","volume":"5","author":"TE Shulman","year":"2006","unstructured":"Shulman TE, Hemenover SH. Is dispositional emotional intelligence synonymous with personality? Self Identity. 2006;5(2):147\u201371.","journal-title":"Self Identity"},{"key":"263_CR78","doi-asserted-by":"crossref","unstructured":"Siegel M, Breazeal C, Norton M. Persuasive robotics: the influence of robot gender on human behavior. In: Proceedings of the IEEE\/RSJ international conference on intelligent robots and systems (IROS), Missouri; 2009. p. 2563\u201368.","DOI":"10.1109\/IROS.2009.5354116"},{"key":"263_CR79","doi-asserted-by":"crossref","unstructured":"Silva LCD, Miyasato T, Nakatsu R. Facial emotion recognition using multimodal information. In: Proceedings of IEEE international conference on information, communications, and signal processing (ICICS), vol\u00a01, Singapore; 1997. p. 397\u2013401.","DOI":"10.1109\/ICICS.1997.647126"},{"issue":"2","key":"263_CR80","doi-asserted-by":"publisher","first-page":"115","DOI":"10.3758\/BF03195356","volume":"33","author":"J Spencer-Smith","year":"2001","unstructured":"Spencer-Smith J, Wild H, Innes-Ker A, Townsend JT, Duffy C, Edwards C, Ervin K, Merritt N, Paik JW. Making faces: creating three-dimensional parameterized models of facial expression. Behav Res Methods Instrum Comput. 2001;33(2):115\u201323.","journal-title":"Behav Res Methods Instrum Comput"},{"key":"263_CR81","doi-asserted-by":"crossref","unstructured":"Tapus A, Aly A. User adaptable robot behavior. In: Proceedings of the IEEE international conference on collaboration technologies and systems (CTS), Pennsylvania; 2011.","DOI":"10.1109\/CTS.2011.5928681"},{"key":"263_CR82","doi-asserted-by":"publisher","first-page":"123","DOI":"10.1016\/S0167-6393(96)00068-4","volume":"21","author":"P Taylor","year":"1997","unstructured":"Taylor P, Isard A. SSML: A speech synthesis markup language. Speech Commun. 1997;21:123\u201333.","journal-title":"Speech Commun"},{"issue":"4","key":"263_CR83","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1145\/3072959.3073699","volume":"36","author":"S Taylor","year":"2017","unstructured":"Taylor S, Kim T, Yue Y, Mahler M, Krahe J, Rodriguez AG, Hodgins J, Matthews I. A deep learning approach for generalized speech animation. ACM Trans Graph. 2017;36(4):1\u201311.","journal-title":"ACM Trans Graph"},{"key":"263_CR84","doi-asserted-by":"publisher","first-page":"555","DOI":"10.1007\/s12369-019-00524-z","volume":"11","author":"C Tsiourti","year":"2019","unstructured":"Tsiourti C, Weiss A, Wac K, Vincze\u00a0M. Multimodal integration of emotional signals from voice, body, and context: effects of (in)congruence on emotion recognition and attitudes towards robots. Int J Soc Robot. 2019;11:555\u201373.","journal-title":"Int J Soc Robot"},{"key":"263_CR85","doi-asserted-by":"crossref","unstructured":"Uhrig MK, Trautmann N, Baumgartner U, Treede RD, Henrich F, Hiller W, Marschall S. Emotion elicitation: a comparison of pictures and films. Front Psychol; 2016.","DOI":"10.3389\/fpsyg.2016.00180"},{"key":"263_CR86","doi-asserted-by":"crossref","unstructured":"Um SY, Oh S, Byun K, Jang I, Ahn C, Kang HG. Emotional speech synthesis with rich and granularized control. In: Proceedings of the IEEE international conference on acoustics, speech and signal processing (ICASSP), Barcelona; 2020.","DOI":"10.1109\/ICASSP40776.2020.9053732"},{"key":"263_CR87","doi-asserted-by":"crossref","unstructured":"Vlachos E, Sch\u00e4rfe, H. Android emotions revealed. In: Proceedings of the 4th international conference on social robotics (ICSR), Chengdu; 2012. p. 56\u201365.","DOI":"10.1007\/978-3-642-34103-8_6"},{"key":"263_CR88","doi-asserted-by":"crossref","unstructured":"Vougioukas K, Petridis S, Pantic M. End-to-end speech-driven facial animation with temporal GANs. In: Proceedings of the British machine vision conference (BMVC), UK; 2018.","DOI":"10.1007\/s11263-019-01251-8"},{"key":"263_CR89","doi-asserted-by":"publisher","first-page":"879","DOI":"10.1002\/(SICI)1099-0992(1998110)28:6<879::AID-EJSP901>3.0.CO;2-W","volume":"28","author":"HG Wallbott","year":"1998","unstructured":"Wallbott HG. Bodily expression of emotion. Eur J Soc Psychol. 1998;28:879\u201396.","journal-title":"Eur J Soc Psychol"},{"key":"263_CR90","doi-asserted-by":"crossref","unstructured":"Wang Y, Skerry-Ryan RJ, Stanton D, Wu Y, Weiss RJ, Jaitly N, Yang Z, Xiao Y, Chen Z, Bengio S, Le Q, Agiomyrgiannakis Y, Clark R, Saurous R.A. Tacotron: Towards end-to-end speech synthesis. In: Proceedings of the annual conference of the international speech communication association (INTERSPEECH); 2017.","DOI":"10.21437\/Interspeech.2017-1452"},{"key":"263_CR91","doi-asserted-by":"crossref","unstructured":"Yoon Y, Ko WR, Jang M, Lee J, Kim J, Lee G. Robots learn social skills: End-to-end learning of co-speech gesture generation for humanoid robots. In: Proceedings of the international conference on robotics and automation (ICRA), Montreal; 2019. p. 4303\u20139.","DOI":"10.1109\/ICRA.2019.8793720"},{"key":"263_CR92","doi-asserted-by":"crossref","unstructured":"Zen H, Senior A, Schuster M. Statistical parametric speech synthesis using deep neural networks. In: Proceedings of the IEEE international conference on acoustics, speech and signal processing (ICASSP); 2013. p. 7962\u20136.","DOI":"10.1109\/ICASSP.2013.6639215"},{"key":"263_CR93","doi-asserted-by":"crossref","unstructured":"Zeng Z, Pantic M, Roisman GI, Huang TS. A survey of affect recognition methods: audio, visual, and spontaneous expressions. IEEE Trans Pattern Anal Mach Intell. 2009;31(1).","DOI":"10.1109\/TPAMI.2008.52"}],"container-title":["SN Computer Science"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s42979-020-00263-3.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s42979-020-00263-3\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s42979-020-00263-3.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,8,15]],"date-time":"2024-08-15T03:14:47Z","timestamp":1723691687000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s42979-020-00263-3"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2020,9,29]]},"references-count":93,"journal-issue":{"issue":"6","published-print":{"date-parts":[[2020,11]]}},"alternative-id":["263"],"URL":"https:\/\/doi.org\/10.1007\/s42979-020-00263-3","relation":{},"ISSN":["2662-995X","2661-8907"],"issn-type":[{"value":"2662-995X","type":"print"},{"value":"2661-8907","type":"electronic"}],"subject":[],"published":{"date-parts":[[2020,9,29]]},"assertion":[{"value":"26 February 2020","order":1,"name":"received","label":"Received","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"19 July 2020","order":2,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"29 September 2020","order":3,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Compliance with Ethical Standards"}},{"value":"On behalf of 
all authors, the corresponding author states that there is no conflict of interest.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflict of Interest"}}],"article-number":"314"}}