{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,10,17]],"date-time":"2025-10-17T14:31:16Z","timestamp":1760711476294,"version":"3.41.2"},"reference-count":4,"publisher":"Frontiers Media SA","license":[{"start":{"date-parts":[[2025,2,25]],"date-time":"2025-02-25T00:00:00Z","timestamp":1740441600000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/"}],"content-domain":{"domain":["frontiersin.org"],"crossmark-restriction":true},"short-container-title":["Front. Robot. AI"],"DOI":"10.3389\/frobt.2025.1572828","type":"journal-article","created":{"date-parts":[[2025,2,25]],"date-time":"2025-02-25T05:11:14Z","timestamp":1740460274000},"update-policy":"https:\/\/doi.org\/10.3389\/crossmark-policy","source":"Crossref","is-referenced-by-count":2,"title":["Editorial: AI-powered musical and entertainment robotics"],"prefix":"10.3389","volume":"12","author":[{"given":"Huijiang","family":"Wang","sequence":"first","affiliation":[]},{"given":"Josie","family":"Hughes","sequence":"additional","affiliation":[]},{"given":"Tetsushi","family":"Nonaka","sequence":"additional","affiliation":[]},{"given":"Arsen","family":"Abdulali","sequence":"additional","affiliation":[]},{"given":"Thilina Dulantha","family":"Lalitharatne","sequence":"additional","affiliation":[]},{"given":"Fumiya","family":"Iida","sequence":"additional","affiliation":[]}],"member":"1965","published-online":{"date-parts":[[2025,2,25]]},"reference":[{"key":"B1","doi-asserted-by":"publisher","first-page":"3","DOI":"10.1186\/1687-4722-2012-3","article-title":"Towards expressive musical robots: a cross-modal framework for emotional gesture, voice and music","volume":"2012","author":"Lim","year":"2012","journal-title":"EURASIP J. Audio, Speech, Music Process."},{"key":"B2","doi-asserted-by":"publisher","first-page":"101","DOI":"10.1007\/s10846-015-0259-2","article-title":"A survey of autonomous human affect detection methods for social robots engaged in natural hri","volume":"82","author":"McColl","year":"2016","journal-title":"J. Intelligent & Robotic Syst."},{"key":"B3","doi-asserted-by":"crossref","first-page":"300","DOI":"10.1109\/RoboSoft54090.2022.9762138","article-title":"Data-driven simulation framework for expressive piano playing by anthropomorphic hand with variable passive properties","volume-title":"2022 IEEE 5th international conference on soft robotics (RoboSoft)","author":"Wang","year":"2022"},{"key":"B4","doi-asserted-by":"publisher","first-page":"4650","DOI":"10.1109\/tro.2024.3484633","article-title":"Human-robot cooperative piano playing with learning-based real-time music accompaniment","volume":"40","author":"Wang","year":"2024","journal-title":"IEEE Trans. Robotics"}],"container-title":["Frontiers in Robotics and AI"],"original-title":[],"link":[{"URL":"https:\/\/www.frontiersin.org\/articles\/10.3389\/frobt.2025.1572828\/full","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,2,25]],"date-time":"2025-02-25T05:11:16Z","timestamp":1740460276000},"score":1,"resource":{"primary":{"URL":"https:\/\/www.frontiersin.org\/articles\/10.3389\/frobt.2025.1572828\/full"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,2,25]]},"references-count":4,"alternative-id":["10.3389\/frobt.2025.1572828"],"URL":"https:\/\/doi.org\/10.3389\/frobt.2025.1572828","relation":{},"ISSN":["2296-9144"],"issn-type":[{"type":"electronic","value":"2296-9144"}],"subject":[],"published":{"date-parts":[[2025,2,25]]},"article-number":"1572828"}}