{"metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00208969","sets":["581:10433:10434"]},"path":["10434"],"owner":"44499","recid":"208969","title":["Methods for Efficiently Constructing Text-dialogue-agent System using Existing Anime Characters"],"pubdate":{"attribute_name":"公開日","attribute_value":"2021-01-15"},"_buckets":{"deposit":"fb12a042-890b-49e9-bb1a-8c0f6a338f71"},"_deposit":{"id":"208969","pid":{"type":"depid","value":"208969","revision_id":0},"owners":[44499],"status":"published","created_by":44499},"item_title":"Methods for Efficiently Constructing Text-dialogue-agent System using Existing Anime Characters","author_link":["525558","525561","525547","525549","525562","525559","525550","525553","525548","525556","525557","525560","525563","525544","525552","525546","525554","525555","525551","525545"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"Methods for Efficiently Constructing Text-dialogue-agent System using Existing Anime Characters"},{"subitem_title":"Methods for Efficiently Constructing Text-dialogue-agent System using Existing Anime Characters","subitem_title_language":"en"}]},"item_keyword":{"attribute_name":"キーワード","attribute_value_mlt":[{"subitem_subject":"[特集:持続可能な社会を実現するコラボレーション技術とネットワークサービス] text-dialogue-agent system, existing anime character, efficient construction method, utterance generation, motion generation","subitem_subject_scheme":"Other"}]},"item_type_id":"2","publish_date":"2021-01-15","item_2_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation"},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation"},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation"},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation"},{"subitem_text_value":"NTT Communication Science Laboratories, NTT Corporation"},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation"},{"subitem_text_value":"DWANGO Co., Ltd."},{"subitem_text_value":"DWANGO Co., Ltd."},{"subitem_text_value":"DWANGO Co., Ltd."},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation"}]},"item_2_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation","subitem_text_language":"en"},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation","subitem_text_language":"en"},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation","subitem_text_language":"en"},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation","subitem_text_language":"en"},{"subitem_text_value":"NTT Communication Science Laboratories, NTT Corporation","subitem_text_language":"en"},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation","subitem_text_language":"en"},{"subitem_text_value":"DWANGO Co., Ltd.","subitem_text_language":"en"},{"subitem_text_value":"DWANGO Co., Ltd.","subitem_text_language":"en"},{"subitem_text_value":"DWANGO Co., Ltd.","subitem_text_language":"en"},{"subitem_text_value":"NTT Media Intelligence Laboratories, NTT Corporation","subitem_text_language":"en"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"eng"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/208969/files/IPSJ-JNL6201006.pdf","label":"IPSJ-JNL6201006.pdf"},"date":[{"dateType":"Available","dateValue":"2023-01-15"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-JNL6201006.pdf","filesize":[{"value":"5.5 MB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"0","billingrole":"5"},{"tax":["include_tax"],"price":"0","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"8"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"23ddb7f3-3c4c-4ec6-a569-16961cf6d4f3","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2021 by the Information Processing Society of Japan"}]},"item_2_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Ryo, Ishii"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Ryuichiro, Higashinaka"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Koh, Mitsuda"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Taichi, Katayama"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Masahiro, Mizukami"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Junji, Tomita"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Hidetoshi, Kawabata"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Emi, Yamaguchi"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Noritake, Adachi"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Yushi, Aono"}],"nameIdentifiers":[{}]}]},"item_2_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Ryo, Ishii","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Ryuichiro, Higashinaka","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Koh, Mitsuda","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Taichi, Katayama","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Masahiro, Mizukami","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Junji, Tomita","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Hidetoshi, Kawabata","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Emi, Yamaguchi","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Noritake, Adachi","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Yushi, Aono","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_2_source_id_9":{"attribute_name":"書誌レコードID","attribute_value_mlt":[{"subitem_source_identifier":"AN00116647","subitem_source_identifier_type":"NCID"}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_6501","resourcetype":"journal article"}]},"item_2_source_id_11":{"attribute_name":"ISSN","attribute_value_mlt":[{"subitem_source_identifier":"1882-7764","subitem_source_identifier_type":"ISSN"}]},"item_2_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"Starting from their early years, many persons dream of being able to chat with their favorite anime characters. To make such a dream possible, we propose an efficient method for constructing a system that enables users to text chat with existing anime characters. We tackled two research problems to generate verbal and nonverbal behaviors for a text-chat agent system utilizing an existing character. A major issue in creating verbal behavior is generating utterance text that reflects the personality of existing characters in response to any user questions. To cope with this problem we propose use of role play-based question-answering to efficiently collect high-quality paired data of user questions and system answers reflecting the personality of an anime character. We also propose a new utterance generation method that uses a neural translation model with the collected data. Rich and natural expressions of nonverbal behavior greatly enhance the appeal of agent systems. However, not all existing anime characters move as naturally and as diversely as humans. Therefore, we propose a method that can automatically generate whole-body motion from spoken text in order to give the anime characters natural, human-like movements. In addition to these movements, we try to add a small amount of characteristic movement on a rule basis to reflect personality. We created a text-dialogue agent system of a popular existing anime character using our proposed generation methods. As a result of a subjective evaluation of the implemented system, our methods for generating verbal and nonverbal behavior improved the impression of the agent's responsiveness and reflected the personality of the character. Since generating characteristic motions with a small amount of characteristic movement on the basis of heuristic rules was not effective, our proposed motion generation method which can generate the average motion of many people, is useful for generating motion for existing anime characters. Therefore, our proposed methods for generating verbal and nonverbal behaviors and the system-construction method are likely to prove a powerful tool for achieving text-dialogue agent systems for existing characters.\n------------------------------\nThis is a preprint of an article intended for publication Journal of\nInformation Processing(JIP). This preprint should not be cited. This\narticle should be cited as: Journal of Information Processing Vol.29(2021) (online)\nDOI http://dx.doi.org/10.2197/ipsjjip.29.30\n------------------------------","subitem_description_type":"Other"}]},"item_2_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"Starting from their early years, many persons dream of being able to chat with their favorite anime characters. To make such a dream possible, we propose an efficient method for constructing a system that enables users to text chat with existing anime characters. We tackled two research problems to generate verbal and nonverbal behaviors for a text-chat agent system utilizing an existing character. A major issue in creating verbal behavior is generating utterance text that reflects the personality of existing characters in response to any user questions. To cope with this problem we propose use of role play-based question-answering to efficiently collect high-quality paired data of user questions and system answers reflecting the personality of an anime character. We also propose a new utterance generation method that uses a neural translation model with the collected data. Rich and natural expressions of nonverbal behavior greatly enhance the appeal of agent systems. However, not all existing anime characters move as naturally and as diversely as humans. Therefore, we propose a method that can automatically generate whole-body motion from spoken text in order to give the anime characters natural, human-like movements. In addition to these movements, we try to add a small amount of characteristic movement on a rule basis to reflect personality. We created a text-dialogue agent system of a popular existing anime character using our proposed generation methods. As a result of a subjective evaluation of the implemented system, our methods for generating verbal and nonverbal behavior improved the impression of the agent's responsiveness and reflected the personality of the character. Since generating characteristic motions with a small amount of characteristic movement on the basis of heuristic rules was not effective, our proposed motion generation method which can generate the average motion of many people, is useful for generating motion for existing anime characters. Therefore, our proposed methods for generating verbal and nonverbal behaviors and the system-construction method are likely to prove a powerful tool for achieving text-dialogue agent systems for existing characters.\n------------------------------\nThis is a preprint of an article intended for publication Journal of\nInformation Processing(JIP). This preprint should not be cited. This\narticle should be cited as: Journal of Information Processing Vol.29(2021) (online)\nDOI http://dx.doi.org/10.2197/ipsjjip.29.30\n------------------------------","subitem_description_type":"Other"}]},"item_2_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographic_titles":[{"bibliographic_title":"情報処理学会論文誌"}],"bibliographicIssueDates":{"bibliographicIssueDate":"2021-01-15","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"1","bibliographicVolumeNumber":"62"}]},"relation_version_is_last":true,"weko_creator_id":"44499"},"id":208969,"updated":"2025-01-19T18:39:23.666508+00:00","links":{},"created":"2025-01-19T01:10:18.760257+00:00"}