{"metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00231576","sets":["6164:6165:9654:11509"]},"path":["11509"],"owner":"44499","recid":"231576","title":["A Proposal of Emotion Estimation Method in Social Robots using Facial Expression Recognition Model with Graph-based Techniques"],"pubdate":{"attribute_name":"公開日","attribute_value":"2023-12-20"},"_buckets":{"deposit":"48ec48e6-fe10-4b19-b558-4129f025ffb4"},"_deposit":{"id":"231576","pid":{"type":"depid","value":"231576","revision_id":0},"owners":[44499],"status":"published","created_by":44499},"item_title":"A Proposal of Emotion Estimation Method in Social Robots using Facial Expression Recognition Model with Graph-based Techniques","author_link":["625345","625342","625341","625347","625343","625346","625340","625338","625344","625339"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"A Proposal of Emotion Estimation Method in Social Robots using Facial Expression Recognition Model with Graph-based Techniques"},{"subitem_title":"A Proposal of Emotion Estimation Method in Social Robots using Facial Expression Recognition Model with Graph-based Techniques","subitem_title_language":"en"}]},"item_type_id":"18","publish_date":"2023-12-20","item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"eng"}]},"item_18_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"King Mongkut's University of Technology Thonburi"},{"subitem_text_value":"Shibaura Institute of Technology"},{"subitem_text_value":"Shibaura Institute of Technology"},{"subitem_text_value":"Shibaura Institute of Technology"},{"subitem_text_value":"Shibaura Institute of Technology"}]},"item_18_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"King Mongkut's University of Technology Thonburi","subitem_text_language":"en"},{"subitem_text_value":"Shibaura Institute of Technology","subitem_text_language":"en"},{"subitem_text_value":"Shibaura Institute of Technology","subitem_text_language":"en"},{"subitem_text_value":"Shibaura Institute of Technology","subitem_text_language":"en"},{"subitem_text_value":"Shibaura Institute of Technology","subitem_text_language":"en"}]},"item_publisher":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/231576/files/IPSJ-APRIS2023008.pdf","label":"IPSJ-APRIS2023008.pdf"},"date":[{"dateType":"Available","dateValue":"2023-12-20"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-APRIS2023008.pdf","filesize":[{"value":"872.9 kB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"0","billingrole":"5"},{"tax":["include_tax"],"price":"0","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"42"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"263922ac-c097-44b3-bae1-8bbf71c96f51","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2023 by the Information Processing Society of Japan"}]},"item_18_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Nopphakorn, Subsa-ard"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Felipe, Yudi 
Fulini"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Tipporn, Laohakangvalvit"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Kaoru, Suzuki"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Midori, Sugaya"}],"nameIdentifiers":[{}]}]},"item_18_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Nopphakorn, Subsa-ard","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Felipe, Yudi Fulini","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Tipporn, Laohakangvalvit","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Kaoru, Suzuki","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Midori, Sugaya","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_5794","resourcetype":"conference paper"}]},"item_18_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"Social robots play a crucial role in human-robot interaction, offering potential benefits in various applications, including nursing care and home assistance. Our previous study proposed “a prototype of multi-modal interaction robot based on emotion estimation method using physiological signals”, which presented our implementation on using electroencephalography (EEG) and Heart Rate Variability (HRV) as physiological signals to estimate the emotional states of human. However, wearable sensing devices could be inconvenient to use in the real world for nursing care robots or home-use robots because they may irritate wearing. To tackle this problem, we proposed a new approach for emotion estimation method. Various studies have proposed various methods to estimate human emotions such as recognition of facial expressions, postures, tone of voice, and speech, including the integration of those techniques for further classification. In this study, we focus on the facial expression recognition because facial expression can be captured by a camera instead of any wearable devices, which is easy to use and deploy. Our proposed method employed facial expression recognition with a graph-based technique using an open-source MediaPipe framework to extract face landmarks from photos and used Deep Learning technique to build emotion classification from face mesh data. To validate our proposed method, we used FER2013 dataset from Microsoft as a benchmark dataset. Finally, we implemented our method to the prototype of robot. The results show that our proposed method effectively classify emotions from facial expression captured by web camera, and the robot can interact according to the estimated emotions continuously over time.","subitem_description_type":"Other"}]},"item_18_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"Social robots play a crucial role in human-robot interaction, offering potential benefits in various applications, including nursing care and home assistance. Our previous study proposed “a prototype of multi-modal interaction robot based on emotion estimation method using physiological signals”, which presented our implementation on using electroencephalography (EEG) and Heart Rate Variability (HRV) as physiological signals to estimate the emotional states of human. 
However, wearable sensing devices could be inconvenient to use in the real world for nursing care robots or home-use robots because they may irritate wearing. To tackle this problem, we proposed a new approach for emotion estimation method. Various studies have proposed various methods to estimate human emotions such as recognition of facial expressions, postures, tone of voice, and speech, including the integration of those techniques for further classification. In this study, we focus on the facial expression recognition because facial expression can be captured by a camera instead of any wearable devices, which is easy to use and deploy. Our proposed method employed facial expression recognition with a graph-based technique using an open-source MediaPipe framework to extract face landmarks from photos and used Deep Learning technique to build emotion classification from face mesh data. To validate our proposed method, we used FER2013 dataset from Microsoft as a benchmark dataset. Finally, we implemented our method to the prototype of robot. The results show that our proposed method effectively classify emotions from facial expression captured by web camera, and the robot can interact according to the estimated emotions continuously over time.","subitem_description_type":"Other"}]},"item_18_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicPageEnd":"41","bibliographic_titles":[{"bibliographic_title":"Proceedings of Asia Pacific Conference on Robot IoT System Development and Platform"}],"bibliographicPageStart":"40","bibliographicIssueDates":{"bibliographicIssueDate":"2023-12-20","bibliographicIssueDateType":"Issued"},"bibliographicVolumeNumber":"2023"}]},"relation_version_is_last":true,"weko_creator_id":"44499"},"id":231576,"updated":"2025-01-19T10:42:52.477140+00:00","links":{},"created":"2025-01-19T01:31:57.580990+00:00"}
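The abstract describes a two-stage pipeline: MediaPipe face-landmark extraction followed by a deep learning classifier over the resulting face mesh. Below is a minimal Python sketch of such a pipeline, assuming MediaPipe's Face Mesh solution (468 landmarks) and a small Keras dense network over the seven FER2013 emotion classes. The network architecture, landmark preprocessing, and webcam demo are illustrative assumptions, not the configuration reported in the paper.

```python
# Minimal sketch: MediaPipe face-mesh landmarks -> dense emotion classifier.
# The model size and lack of landmark normalization are assumptions for
# illustration; only the MediaPipe/Keras calls follow their public APIs.
import cv2
import numpy as np
import mediapipe as mp
import tensorflow as tf

# FER2013 label order: 0..6
EMOTIONS = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"]

def extract_landmarks(bgr_image):
    """Return a flat (468*3,) vector of face-mesh x/y/z coordinates, or None."""
    with mp.solutions.face_mesh.FaceMesh(static_image_mode=True,
                                         max_num_faces=1) as fm:
        result = fm.process(cv2.cvtColor(bgr_image, cv2.COLOR_BGR2RGB))
    if not result.multi_face_landmarks:
        return None  # no face detected in this frame
    pts = result.multi_face_landmarks[0].landmark
    return np.array([[p.x, p.y, p.z] for p in pts], dtype=np.float32).ravel()

def build_classifier(input_dim=468 * 3, num_classes=len(EMOTIONS)):
    """Small dense network mapping a landmark vector to emotion probabilities."""
    model = tf.keras.Sequential([
        tf.keras.layers.Input(shape=(input_dim,)),
        tf.keras.layers.Dense(256, activation="relu"),
        tf.keras.layers.Dropout(0.3),
        tf.keras.layers.Dense(128, activation="relu"),
        tf.keras.layers.Dense(num_classes, activation="softmax"),
    ])
    model.compile(optimizer="adam",
                  loss="sparse_categorical_crossentropy",
                  metrics=["accuracy"])
    return model

if __name__ == "__main__":
    # One webcam frame -> landmark vector -> emotion prediction.
    ok, frame = cv2.VideoCapture(0).read()
    if ok and (vec := extract_landmarks(frame)) is not None:
        model = build_classifier()  # would be trained on FER2013 landmarks first
        probs = model.predict(vec[np.newaxis, :], verbose=0)[0]
        print(EMOTIONS[int(np.argmax(probs))])
```

In a real deployment the landmark vectors would typically be normalized (for example, centered on a reference point and scaled to remove head-position effects), the classifier trained on landmarks extracted from FER2013 images, and the loop run continuously on the robot's camera stream so the robot can react to the estimated emotion over time.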