{"created":"2025-01-19T01:33:16.447101+00:00","updated":"2025-01-19T10:26:51.987561+00:00","metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00232418","sets":["581:11492:11494"]},"path":["11494"],"owner":"44499","recid":"232418","title":["Selecting Iconic Gesture Forms Based on Typical Entity Images"],"pubdate":{"attribute_name":"公開日","attribute_value":"2024-02-15"},"_buckets":{"deposit":"14e33632-837f-4c70-ab02-076ebbf8ff6a"},"_deposit":{"id":"232418","pid":{"type":"depid","value":"232418","revision_id":0},"owners":[44499],"status":"published","created_by":44499},"item_title":"Selecting Iconic Gesture Forms Based on Typical Entity Images","author_link":["628997","628998","629003","629002","629004","629001","628999","629000"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"Selecting Iconic Gesture Forms Based on Typical Entity Images"},{"subitem_title":"Selecting Iconic Gesture Forms Based on Typical Entity Images","subitem_title_language":"en"}]},"item_keyword":{"attribute_name":"キーワード","attribute_value_mlt":[{"subitem_subject":"[特集:インタラクションの理解および基盤・応用技術] gesture generation, gesture form, iconic gesture, image representation, deep neural network","subitem_subject_scheme":"Other"}]},"item_type_id":"2","publish_date":"2024-02-15","item_2_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"Faculty of Science and Technology, Seikei University"},{"subitem_text_value":"Human Informatics Laboratories, NTT Corporation"},{"subitem_text_value":"Human Informatics Laboratories, NTT Corporation"},{"subitem_text_value":"Human Informatics Laboratories, NTT Corporation"}]},"item_2_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"Faculty of Science and Technology, Seikei University","subitem_text_language":"en"},{"subitem_text_value":"Human Informatics Laboratories, NTT Corporation","subitem_text_language":"en"},{"subitem_text_value":"Human Informatics Laboratories, NTT Corporation","subitem_text_language":"en"},{"subitem_text_value":"Human Informatics Laboratories, NTT Corporation","subitem_text_language":"en"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"eng"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/232418/files/IPSJ-JNL6502032.pdf","label":"IPSJ-JNL6502032.pdf"},"date":[{"dateType":"Available","dateValue":"2026-02-15"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-JNL6502032.pdf","filesize":[{"value":"4.6 MB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"0","billingrole":"5"},{"tax":["include_tax"],"price":"0","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"8"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"7585eb77-43c2-43e8-9569-2151e1850bc0","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2024 by the Information Processing Society of Japan"}]},"item_2_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Yukiko, I. 
Nakano"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Fumio, Nihei"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Ryo, Ishii"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Ryuichiro, Higashinaka"}],"nameIdentifiers":[{}]}]},"item_2_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Yukiko, I. Nakano","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Fumio, Nihei","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Ryo, Ishii","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Ryuichiro, Higashinaka","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_2_source_id_9":{"attribute_name":"書誌レコードID","attribute_value_mlt":[{"subitem_source_identifier":"AN00116647","subitem_source_identifier_type":"NCID"}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_6501","resourcetype":"journal article"}]},"item_2_publisher_15":{"attribute_name":"公開者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"item_2_source_id_11":{"attribute_name":"ISSN","attribute_value_mlt":[{"subitem_source_identifier":"1882-7764","subitem_source_identifier_type":"ISSN"}]},"item_2_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"Hand gestures are communication signals that emphasize an important part of an utterance and express the concept of emphasized words. Iconic gestures are hand gestures that depict concrete actions, objects, or events mentioned in speech. In this study, assuming that gesture forms of iconic gestures are determined based on the image of a given object in the speaker's mind, we propose a method for selecting iconic gesture forms based on the image representation obtained from a set of pictures of an object. First, we asked annotators to select a gesture form that best expresses the meaning of a given word based on a typical image and concept in their minds. We also collected a set of pictures of each entity from the web and created an average image representation from them. We then created a Deep Neural Network (DNN) model that takes a set of pictures of objects as input and predicts the typical gesture form that originates from the human mind. In the model evaluation experiment, our two-step gesture form selection method successfully classified seven types of gesture forms with an accuracy of over 62%. Furthermore, we created character animations that performed selected gestures and conducted a preliminary perception study to examine how human users perceive animated iconic gestures.\n------------------------------\nThis is a preprint of an article intended for publication Journal of\nInformation Processing(JIP). This preprint should not be cited. This\narticle should be cited as: Journal of Information Processing Vol.32(2024) (online)\nDOI http://dx.doi.org/10.2197/ipsjjip.32.196\n------------------------------","subitem_description_type":"Other"}]},"item_2_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"Hand gestures are communication signals that emphasize an important part of an utterance and express the concept of emphasized words. Iconic gestures are hand gestures that depict concrete actions, objects, or events mentioned in speech. 
In this study, assuming that gesture forms of iconic gestures are determined based on the image of a given object in the speaker's mind, we propose a method for selecting iconic gesture forms based on the image representation obtained from a set of pictures of an object. First, we asked annotators to select a gesture form that best expresses the meaning of a given word based on a typical image and concept in their minds. We also collected a set of pictures of each entity from the web and created an average image representation from them. We then created a Deep Neural Network (DNN) model that takes a set of pictures of objects as input and predicts the typical gesture form that originates from the human mind. In the model evaluation experiment, our two-step gesture form selection method successfully classified seven types of gesture forms with an accuracy of over 62%. Furthermore, we created character animations that performed selected gestures and conducted a preliminary perception study to examine how human users perceive animated iconic gestures.\n------------------------------\nThis is a preprint of an article intended for publication Journal of\nInformation Processing(JIP). This preprint should not be cited. This\narticle should be cited as: Journal of Information Processing Vol.32(2024) (online)\nDOI http://dx.doi.org/10.2197/ipsjjip.32.196\n------------------------------","subitem_description_type":"Other"}]},"item_2_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographic_titles":[{"bibliographic_title":"情報処理学会論文誌"}],"bibliographicIssueDates":{"bibliographicIssueDate":"2024-02-15","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"2","bibliographicVolumeNumber":"65"}]},"relation_version_is_last":true,"weko_creator_id":"44499"},"id":232418,"links":{}}
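
The abstract outlines the method only at a high level: web pictures of each entity are encoded, averaged into a single image representation, and mapped by a DNN to one of seven gesture form classes. The sketch below illustrates that idea under stated assumptions; the pretrained ResNet-50 encoder, the 2048-dimensional embedding, the linear classification head, and all function names are choices of this illustration, since the record does not describe the paper's actual architecture or its two-step selection procedure.

# Minimal sketch (assumptions noted above): average image representation of an
# entity's pictures, followed by a 7-way gesture form classifier.
import torch
import torch.nn as nn
from torchvision import models, transforms  # requires torchvision >= 0.13
from PIL import Image

NUM_GESTURE_FORMS = 7  # the record states seven gesture form classes

preprocess = transforms.Compose([
    transforms.Resize(256),
    transforms.CenterCrop(224),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406],
                         std=[0.229, 0.224, 0.225]),
])

class GestureFormSelector(nn.Module):
    """Maps a set of pictures of one entity to gesture-form logits (illustrative)."""
    def __init__(self, num_classes: int = NUM_GESTURE_FORMS):
        super().__init__()
        backbone = models.resnet50(weights=models.ResNet50_Weights.DEFAULT)
        backbone.fc = nn.Identity()            # keep the 2048-d pooled features
        self.encoder = backbone
        self.head = nn.Linear(2048, num_classes)

    def forward(self, pictures: torch.Tensor) -> torch.Tensor:
        # pictures: (num_pictures, 3, 224, 224) for a single entity
        feats = self.encoder(pictures)            # (num_pictures, 2048)
        avg = feats.mean(dim=0, keepdim=True)     # average image representation
        return self.head(avg)                     # (1, num_classes) logits

def predict_gesture_form(model: GestureFormSelector, image_paths: list[str]) -> int:
    """Encode a set of web pictures of one entity and pick the most likely form."""
    batch = torch.stack([preprocess(Image.open(p).convert("RGB")) for p in image_paths])
    with torch.no_grad():
        logits = model(batch)
    return int(logits.argmax(dim=1).item())

Averaging the per-picture embeddings before classification makes the prediction depend on the entity's typical visual appearance rather than on any single photograph, which is one plausible reading of the "typical entity image" idea in the title.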