{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,20]],"date-time":"2026-03-20T16:45:06Z","timestamp":1774025106517,"version":"3.50.1"},"reference-count":36,"publisher":"Springer Science and Business Media LLC","issue":"1","license":[{"start":{"date-parts":[[2024,12,4]],"date-time":"2024-12-04T00:00:00Z","timestamp":1733270400000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2024,12,4]],"date-time":"2024-12-04T00:00:00Z","timestamp":1733270400000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"funder":[{"name":"Zhejiang Provincial Natural Science Foundation of Chian","award":["LGF22H090004"],"award-info":[{"award-number":["LGF22H090004"]}]},{"name":"Key Research and Development Project of Zhejiang Province","award":["2020C04009"],"award-info":[{"award-number":["2020C04009"]}]},{"name":"Laboratory of Brain Machine Collaborative","award":["2020E10010"],"award-info":[{"award-number":["2020E10010"]}]}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["SIViP"],"published-print":{"date-parts":[[2025,1]]},"DOI":"10.1007\/s11760-024-03632-0","type":"journal-article","created":{"date-parts":[[2024,12,4]],"date-time":"2024-12-04T17:24:49Z","timestamp":1733333089000},"update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":9,"title":["MF-Net: a multimodal fusion network for emotion recognition based on multiple physiological signals"],"prefix":"10.1007","volume":"19","author":[{"given":"Lei","family":"Zhu","sequence":"first","affiliation":[]},{"given":"Yu","family":"Ding","sequence":"additional","affiliation":[]},{"given":"Aiai","family":"Huang","sequence":"additional","affiliation":[]},{"given":"Xufei","family":"Tan","sequence":"additional","affiliation":[]},{"given":"Jianhai","family":"Zhang","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2024,12,4]]},"reference":[{"key":"3632_CR1","doi-asserted-by":"publisher","DOI":"10.1016\/j.knosys.2019.105217","volume":"190","author":"L Fiorini","year":"2020","unstructured":"Fiorini, L., Mancioppi, G., Semeraro, F., Fujita, H., Cavallo, F.: Unsupervised emotional state classification through physiological parameters for social robotics applications. Knowl.-Based Syst. 190, 105217 (2020)","journal-title":"Knowl.-Based Syst."},{"key":"3632_CR2","doi-asserted-by":"publisher","DOI":"10.1016\/j.rico.2023.100231","volume":"11","author":"SAM Mane","year":"2023","unstructured":"Mane, S.A.M., Shinde, A.: StressNet: hybrid model of LSTM and CNN for stress detection from electroencephalogram signal (EEG). Results Control Optim. 11, 100231 (2023)","journal-title":"Results Control Optim."},{"key":"3632_CR3","doi-asserted-by":"crossref","unstructured":"Gao, D., Wang, K., Wang, M., Zhou, J., Zhang, Y.: SFT-Net: a network for detecting fatigue from EEG signals by combining 4D feature flow and attention mechanism. IEEE J. Biomed. Health Informa 28, 4444\u20134455 (2023). https:\/\/api.semanticscholar.org\/CorpusID:259153959","DOI":"10.1109\/JBHI.2023.3285268"},{"key":"3632_CR4","doi-asserted-by":"publisher","first-page":"19","DOI":"10.1016\/j.inffus.2022.03.009","volume":"83","author":"Y Wang","year":"2022","unstructured":"Wang, Y., Song, W., Tao, W., Liotta, A., Yang, D., Li, X., Gao, S., Sun, Y., Ge, W., Zhang, W., et al.: A systematic review on affective computing: emotion models, databases, and recent advances. Inf. Fusion 83, 19\u201352 (2022)","journal-title":"Inf. Fusion"},{"key":"3632_CR5","doi-asserted-by":"crossref","unstructured":"Li, Y., Guo, W., Wang, Y.: Emotion recognition with attention mechanism-guided dual-feature multi-path interaction network. Signal Image Video Process. 1\u201310 (2024)","DOI":"10.1007\/s11760-024-03178-1"},{"key":"3632_CR6","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2021.116101","volume":"188","author":"H Kim","year":"2022","unstructured":"Kim, H., Zhang, D., Kim, L., Im, C.-H.: Classification of individual\u2019s discrete emotions reflected in facial microexpressions using electroencephalogram and facial electromyogram. Expert Syst. Appl. 188, 116101 (2022)","journal-title":"Expert Syst. Appl."},{"key":"3632_CR7","doi-asserted-by":"publisher","DOI":"10.1016\/j.compbiomed.2021.104696","volume":"136","author":"MM Rahman","year":"2021","unstructured":"Rahman, M.M., Sarkar, A.K., Hossain, M.A., Hossain, M.S., Islam, M.R., Hossain, M.B., Quinn, J.M., Moni, M.A.: Recognition of human emotions using EEG signals: a review. Comput. Biol. Med. 136, 104696 (2021)","journal-title":"Comput. Biol. Med."},{"issue":"4","key":"3632_CR8","doi-asserted-by":"publisher","first-page":"857","DOI":"10.1109\/TAFFC.2019.2901673","volume":"12","author":"J Shukla","year":"2019","unstructured":"Shukla, J., Barreda-Angeles, M., Oliver, J., Nandi, G.C., Puig, D.: Feature extraction and selection for emotion recognition from electrodermal activity. IEEE Trans. Affect. Comput. 12(4), 857\u2013869 (2019)","journal-title":"IEEE Trans. Affect. Comput."},{"key":"3632_CR9","doi-asserted-by":"publisher","first-page":"84","DOI":"10.1016\/j.compind.2017.04.005","volume":"92","author":"Q Zhang","year":"2017","unstructured":"Zhang, Q., Chen, X., Zhan, Q., Yang, T., Xia, S.: Respiration-based emotion recognition with deep learning. Comput. Ind. 92, 84\u201390 (2017)","journal-title":"Comput. Ind."},{"key":"3632_CR10","doi-asserted-by":"publisher","first-page":"1229","DOI":"10.1007\/s11571-022-09898-9","volume":"17","author":"AA Saleem","year":"2022","unstructured":"Saleem, A.A., Siddiqui, H.U.R., Raza, M.A., Rustam, F., Dudley, S.E.M., Ashraf, I.: A systematic review of physiological signals based driver drowsiness detection systems. Cogn. Neurodyn. 17, 1229\u20131259 (2022)","journal-title":"Cogn. Neurodyn."},{"key":"3632_CR11","doi-asserted-by":"crossref","unstructured":"Liu, H., Lou, T., Zhang, Y., Wu, Y., Xiao, Y., Jensen, C.S., Zhang, D.: EEG-based multimodal emotion recognition: a machine learning perspective. IEEE Trans. Instrum. Meas. (2024)","DOI":"10.1109\/TIM.2024.3369130"},{"key":"3632_CR12","doi-asserted-by":"publisher","first-page":"468","DOI":"10.1016\/j.neuropsychologia.2015.03.001","volume":"70","author":"F Ferri","year":"2015","unstructured":"Ferri, F., Tajadura-Jim\u00e9nez, A., V\u00e4ljam\u00e4e, A., Vastano, R., Costantini, M.: Emotion-inducing approaching sounds shape the boundaries of multisensory peripersonal space. Neuropsychologia 70, 468\u2013475 (2015)","journal-title":"Neuropsychologia"},{"key":"3632_CR13","unstructured":"Ekman, P., Friesen, W.V., Ellsworth, P.C.: Emotion in the human face: guidelines for research and an integration of findings (1972). https:\/\/api.semanticscholar.org\/CorpusID:141855078"},{"key":"3632_CR14","doi-asserted-by":"publisher","first-page":"59","DOI":"10.1109\/MSP.2021.3106895","volume":"38","author":"S Zhao","year":"2021","unstructured":"Zhao, S., Jia, G., Yang, J., Ding, G., Keutzer, K.: Emotion recognition from multiple modalities: fundamentals and methodologies. IEEE Signal Process. Mag. 38, 59\u201373 (2021)","journal-title":"IEEE Signal Process. Mag."},{"key":"3632_CR15","doi-asserted-by":"crossref","unstructured":"Ackermann, P., Kohlschein, C., Bitsch, J.A., Wehrle, K., Jeschke, S.: EEG-based automatic emotion recognition: feature extraction, selection and classification methods. In: 2016 IEEE 18th International Conference on E-health Networking, Applications and Services (Healthcom), pp. 1\u20136. IEEE (2016)","DOI":"10.1109\/HealthCom.2016.7749447"},{"issue":"5","key":"3632_CR16","doi-asserted-by":"publisher","first-page":"2305","DOI":"10.1007\/s11760-022-02447-1","volume":"17","author":"Y Zhang","year":"2023","unstructured":"Zhang, Y., Zhang, Y., Wang, S.: An attention-based hybrid deep learning model for EEG emotion recognition. SIViP 17(5), 2305\u20132313 (2023)","journal-title":"SIViP"},{"issue":"1","key":"3632_CR17","doi-asserted-by":"publisher","first-page":"382","DOI":"10.1109\/TAFFC.2020.3025777","volume":"14","author":"W Tao","year":"2020","unstructured":"Tao, W., Li, C., Song, R., Cheng, J., Liu, Y., Wan, F., Chen, X.: EEG-based emotion recognition via channel-wise attention and self attention. IEEE Trans. Affect. Comput. 14(1), 382\u2013393 (2020)","journal-title":"IEEE Trans. Affect. Comput."},{"key":"3632_CR18","doi-asserted-by":"publisher","DOI":"10.1016\/j.compbiomed.2020.103927","volume":"123","author":"Y Liu","year":"2020","unstructured":"Liu, Y., Ding, Y., Li, C., Cheng, J., Song, R., Wan, F., Chen, X.: Multi-channel EEG-based emotion recognition via a multi-level features guided capsule network. Comput. Biol. Med. 123, 103927 (2020)","journal-title":"Comput. Biol. Med."},{"key":"3632_CR19","doi-asserted-by":"publisher","DOI":"10.1016\/j.asoc.2022.108740","volume":"122","author":"D Li","year":"2022","unstructured":"Li, D., Xie, L., Chai, B., Wang, Z., Yang, H.: Spatial-frequency convolutional self-attention network for EEG emotion recognition. Appl. Soft Comput. 122, 108740 (2022)","journal-title":"Appl. Soft Comput."},{"key":"3632_CR20","doi-asserted-by":"publisher","DOI":"10.1016\/j.compbiomed.2022.105303","volume":"143","author":"C Li","year":"2022","unstructured":"Li, C., Wang, B., Zhang, S., Liu, Y., Song, R., Cheng, J., Chen, X.: Emotion recognition from EEG based on multi-task learning with capsule network and attention mechanism. Comput. Biol. Med. 143, 105303 (2022)","journal-title":"Comput. Biol. Med."},{"key":"3632_CR21","doi-asserted-by":"publisher","DOI":"10.1016\/j.neuroimage.2022.119420","volume":"259","author":"X Ru","year":"2022","unstructured":"Ru, X., He, K., Lyu, B., Li, D., Xu, W., Gu, W., Ma, X., Liu, J., Li, C., Li, T., et al.: Multimodal neuroimaging with optically pumped magnetometers: a simultaneous MEG-EEG-FNIRS acquisition system. Neuroimage 259, 119420 (2022)","journal-title":"Neuroimage"},{"key":"3632_CR22","doi-asserted-by":"publisher","first-page":"98","DOI":"10.1016\/j.inffus.2017.02.003","volume":"37","author":"S Poria","year":"2017","unstructured":"Poria, S., Cambria, E., Bajpai, R., Hussain, A.: A review of affective computing: from unimodal analysis to multimodal fusion. Inf. Fusion 37, 98\u2013125 (2017)","journal-title":"Inf. Fusion"},{"issue":"2","key":"3632_CR23","doi-asserted-by":"publisher","first-page":"423","DOI":"10.1109\/TPAMI.2018.2798607","volume":"41","author":"T Baltru\u0161aitis","year":"2018","unstructured":"Baltru\u0161aitis, T., Ahuja, C., Morency, L.-P.: Multimodal machine learning: a survey and taxonomy. IEEE Trans. Pattern Anal. Mach. Intell. 41(2), 423\u2013443 (2018)","journal-title":"IEEE Trans. Pattern Anal. Mach. Intell."},{"key":"3632_CR24","doi-asserted-by":"publisher","first-page":"71","DOI":"10.1016\/j.patrec.2022.08.018","volume":"162","author":"R Agarwal","year":"2022","unstructured":"Agarwal, R., Andujar, M., Canavan, S.J.: Classification of emotions using EEG activity associated with different areas of the brain. Pattern Recognit. Lett. 162, 71\u201380 (2022)","journal-title":"Pattern Recognit. Lett."},{"key":"3632_CR25","doi-asserted-by":"crossref","unstructured":"Lin, W., Li, C., Sun, S.: Deep convolutional neural network for emotion recognition using EEG and peripheral physiological signal. In: Image and Graphics: 9th International Conference, ICIG 2017, Shanghai, China, September 13-15, 2017, Revised Selected Papers, Part II 9, pp. 385\u2013394. Springer (2017)","DOI":"10.1007\/978-3-319-71589-6_33"},{"key":"3632_CR26","doi-asserted-by":"crossref","unstructured":"Ma, J., Tang, H., Zheng, W.-L., Lu, B.-L.: Emotion recognition using multimodal residual lstm network. In: Proceedings of the 27th ACM International Conference on Multimedia, pp. 176\u2013183 (2019)","DOI":"10.1145\/3343031.3350871"},{"key":"3632_CR27","unstructured":"Li, Q., Liu, Y., Yan, F., Zhang, Q., Liu, C.: Emotion recognition based on multiple physiological signals. Zhongguo yi liao qi xie za zhi = Chin. J. Med. Instrum. 444, 283\u2013287 (2020)"},{"key":"3632_CR28","doi-asserted-by":"publisher","first-page":"671","DOI":"10.1007\/s11571-022-09851-w","volume":"17","author":"S Chen","year":"2022","unstructured":"Chen, S., Tang, J., Zhu, L., Kong, W.: A multi-stage dynamical fusion network for multimodal emotion recognition. Cogn. Neurodyn. 17, 671\u2013680 (2022)","journal-title":"Cogn. Neurodyn."},{"key":"3632_CR29","doi-asserted-by":"crossref","unstructured":"Wang, Y., Jiang, W.-B., Li, R., Lu, B.-L.: Emotion transformer fusion: complementary representation properties of EEG and eye movements on recognizing anger and surprise. In: 2021 IEEE International Conference on Bioinformatics and Biomedicine (BIBM), pp. 1575\u20131578. IEEE (2021)","DOI":"10.1109\/BIBM52615.2021.9669556"},{"key":"3632_CR30","doi-asserted-by":"publisher","DOI":"10.1016\/j.dsp.2023.104278","volume":"144","author":"L Gong","year":"2024","unstructured":"Gong, L., Chen, W., Li, M., Zhang, T.: Emotion recognition from multiple physiological signals using intra-and inter-modality attention fusion network. Digit. Signal Process. 144, 104278 (2024)","journal-title":"Digit. Signal Process."},{"key":"3632_CR31","doi-asserted-by":"publisher","first-page":"715","DOI":"10.1109\/TCDS.2021.3071170","volume":"14","author":"W Liu","year":"2021","unstructured":"Liu, W., Qiu, J., Zheng, W.-L., Lu, B.-L.: Comparing recognition performance and robustness of multimodal deep learning models for multimodal emotion recognition. IEEE Trans. Cognit. Dev. Syst. 14, 715\u2013729 (2021)","journal-title":"IEEE Trans. Cognit. Dev. Syst."},{"key":"3632_CR32","doi-asserted-by":"publisher","first-page":"1234162","DOI":"10.3389\/fnins.2023.1234162","volume":"17","author":"B Fu","year":"2023","unstructured":"Fu, B., Gu, C., Fu, M., Xia, Y., Liu, Y.: A novel feature fusion network for multimodal emotion recognition from EEG and eye movement signals. Front. Neurosci. 17, 1234162 (2023)","journal-title":"Front. Neurosci."},{"key":"3632_CR33","doi-asserted-by":"publisher","first-page":"7943","DOI":"10.1109\/ACCESS.2021.3049516","volume":"9","author":"Y Zhang","year":"2021","unstructured":"Zhang, Y., Cheng, C., Zhang, Y.: Multimodal emotion recognition using a hierarchical fusion convolutional neural network. IEEE Access 9, 7943\u20137951 (2021). https:\/\/doi.org\/10.1109\/ACCESS.2021.3049516","journal-title":"IEEE Access"},{"issue":"1","key":"3632_CR34","doi-asserted-by":"publisher","first-page":"18","DOI":"10.1109\/T-AFFC.2011.15","volume":"3","author":"S Koelstra","year":"2011","unstructured":"Koelstra, S., Muhl, C., Soleymani, M., Lee, J.-S., Yazdani, A., Ebrahimi, T., Pun, T., Nijholt, A., Patras, I.: Deap: a database for emotion analysis; using physiological signals. IEEE Trans. Affect. Comput. 3(1), 18\u201331 (2011)","journal-title":"IEEE Trans. Affect. Comput."},{"issue":"6","key":"3632_CR35","first-page":"63","volume":"35","author":"JD Morris","year":"1995","unstructured":"Morris, J.D.: Observations: SAM: the Self-assessment Manikin an efficient cross-cultural measurement of emotional response 1. J. Advert. Res. 35(6), 63\u201368 (1995)","journal-title":"J. Advert. Res."},{"issue":"1","key":"3632_CR36","doi-asserted-by":"publisher","first-page":"98","DOI":"10.1109\/JBHI.2017.2688239","volume":"22","author":"S Katsigiannis","year":"2017","unstructured":"Katsigiannis, S., Ramzan, N.: Dreamer: a database for emotion recognition through EEG and ECG signals from wireless low-cost off-the-shelf devices. IEEE J. Biomed. Health Inform. 22(1), 98\u2013107 (2017)","journal-title":"IEEE J. Biomed. Health Inform."}],"container-title":["Signal, Image and Video Processing"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11760-024-03632-0.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s11760-024-03632-0\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11760-024-03632-0.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,1,28]],"date-time":"2025-01-28T17:48:32Z","timestamp":1738086512000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s11760-024-03632-0"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,12,4]]},"references-count":36,"journal-issue":{"issue":"1","published-print":{"date-parts":[[2025,1]]}},"alternative-id":["3632"],"URL":"https:\/\/doi.org\/10.1007\/s11760-024-03632-0","relation":{},"ISSN":["1863-1703","1863-1711"],"issn-type":[{"value":"1863-1703","type":"print"},{"value":"1863-1711","type":"electronic"}],"subject":[],"published":{"date-parts":[[2024,12,4]]},"assertion":[{"value":"23 July 2024","order":1,"name":"received","label":"Received","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"26 September 2024","order":2,"name":"revised","label":"Revised","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"15 October 2024","order":3,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"4 December 2024","order":4,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflict of interest"}}],"article-number":"58"}}