{"metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00211396","sets":["1164:5336:10549:10585"]},"path":["10585"],"owner":"44499","recid":"211396","title":["スマートフォンのみを用いた周囲環境への視線入力インタフェースの検討"],"pubdate":{"attribute_name":"公開日","attribute_value":"2021-05-25"},"_buckets":{"deposit":"a5e01a2f-083b-4537-ab41-954aeeec3df9"},"_deposit":{"id":"211396","pid":{"type":"depid","value":"211396","revision_id":0},"owners":[44499],"status":"published","created_by":44499},"item_title":"スマートフォンのみを用いた周囲環境への視線入力インタフェースの検討","author_link":["537061","537060","537059","537055","537058","537056","537054","537057"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"スマートフォンのみを用いた周囲環境への視線入力インタフェースの検討"},{"subitem_title":"A Gaze Input Interface to Surrounding Environments Using a Single Smartphone","subitem_title_language":"en"}]},"item_keyword":{"attribute_name":"キーワード","attribute_value_mlt":[{"subitem_subject":"入力・デバイス","subitem_subject_scheme":"Other"}]},"item_type_id":"4","publish_date":"2021-05-25","item_4_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"東北大学電気通信研究所"},{"subitem_text_value":"東北大学電気通信研究所"},{"subitem_text_value":"東北大学電気通信研究所"},{"subitem_text_value":"東北大学電気通信研究所\n "}]},"item_4_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"Research Institute of Electrical Communication, Tohoku University","subitem_text_language":"en"},{"subitem_text_value":"Research Institute of Electrical Communication, Tohoku University","subitem_text_language":"en"},{"subitem_text_value":"Research Institute of Electrical Communication, Tohoku University","subitem_text_language":"en"},{"subitem_text_value":"Research Institute of Electrical Communication, Tohoku University","subitem_text_language":"en"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"jpn"}]},"item_publisher":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/211396/files/IPSJ-EC21060010.pdf","label":"IPSJ-EC21060010.pdf"},"date":[{"dateType":"Available","dateValue":"2023-05-25"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-EC21060010.pdf","filesize":[{"value":"2.0 MB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"660","billingrole":"5"},{"tax":["include_tax"],"price":"330","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"40"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"1af8bac6-6edd-4873-bdf4-65b0408f68ff","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2021 by the Information Processing Society of Japan"}]},"item_4_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"永井, 崇大"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"藤田, 和之"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"高嶋, 和毅"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"北村, 喜文"}],"nameIdentifiers":[{}]}]},"item_4_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Takahiro, Nagai","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Kazuyuki, 
Fujita","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Kazuki, Takashima","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Yoshifumi, Kitamura","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_4_source_id_9":{"attribute_name":"書誌レコードID","attribute_value_mlt":[{"subitem_source_identifier":"AA12049625","subitem_source_identifier_type":"NCID"}]},"item_4_textarea_12":{"attribute_name":"Notice","attribute_value_mlt":[{"subitem_textarea_value":"SIG Technical Reports are nonrefereed and hence may later appear in any journals, conferences, symposia, etc."}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_18gh","resourcetype":"technical report"}]},"item_4_source_id_11":{"attribute_name":"ISSN","attribute_value_mlt":[{"subitem_source_identifier":"2188-8914","subitem_source_identifier_type":"ISSN"}]},"item_4_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"Gaze is a useful input modality for estimating a user’s region of interest and pointing to a distant target, but the challenge is that it usually requires the installation or wearing of additional specialized devices. In this work, we propose a novel user interface that enables gaze input to the user’s surrounding environment using only a widely-used smartphone. By simultaneously using both front and rear cameras and a depth sensor on the smartphone, it can track the user’s head orientation while recognizing its own 3D position in a known 3D map. This allows the system to estimate the user’s 3D head-gaze direction to the surrounding environment. We conducted an early performance test to evaluate the accuracy of head-gaze estimation using our interface. Based on these results, we estimated that the required target size for avoiding erroneous input is 1.64 m × 0.94 m. Finally we discussed the interactions in which the proposal is effective.","subitem_description_type":"Other"}]},"item_4_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"Gaze is a useful input modality for estimating a user’s region of interest and pointing to a distant target, but the challenge is that it usually requires the installation or wearing of additional specialized devices. In this work, we propose a novel user interface that enables gaze input to the user’s surrounding environment using only a widely-used smartphone. By simultaneously using both front and rear cameras and a depth sensor on the smartphone, it can track the user’s head orientation while recognizing its own 3D position in a known 3D map. This allows the system to estimate the user’s 3D head-gaze direction to the surrounding environment. We conducted an early performance test to evaluate the accuracy of head-gaze estimation using our interface. Based on these results, we estimated that the required target size for avoiding erroneous input is 1.64 m × 0.94 m. 
Finally we discussed the interactions in which the proposal is effective.","subitem_description_type":"Other"}]},"item_4_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicPageEnd":"4","bibliographic_titles":[{"bibliographic_title":"研究報告エンタテインメントコンピューティング(EC)"}],"bibliographicPageStart":"1","bibliographicIssueDates":{"bibliographicIssueDate":"2021-05-25","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"10","bibliographicVolumeNumber":"2021-EC-60"}]},"relation_version_is_last":true,"weko_creator_id":"44499"},"id":211396,"updated":"2025-01-19T17:48:53.293465+00:00","links":{},"created":"2025-01-19T01:12:34.586400+00:00"}
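The record's abstract sketches a concrete pipeline: the front camera tracks the user's head orientation, the rear camera and depth sensor localize the phone inside a known 3D map, and the two poses are composed into a head-gaze ray that is intersected with targets in the environment. The Swift sketch below illustrates that composition under stated assumptions; it is not the authors' code. The paper does not name a framework, so ARKit's combined world + user-face tracking mode (which runs the front and rear cameras simultaneously) is assumed here, and all identifiers, the target placement, and the axis conventions are illustrative. Only the 1.64 m × 0.94 m target size comes from the abstract.

```swift
import ARKit
import simd

// Hypothetical sketch of the head-gaze pipeline described in the abstract.
// Assumes ARKit's combined world + user-face tracking (iOS 13+); the paper
// does not name its framework, so treat every identifier as illustrative.

struct Ray {
    var origin: simd_float3
    var direction: simd_float3
}

/// Start a session that uses the rear camera (and a depth sensor, where
/// present) for the phone's pose in the known map, and the front camera
/// for the user's head pose — both at the same time.
func startSession(_ session: ARSession) {
    guard ARWorldTrackingConfiguration.supportsUserFaceTracking else { return }
    let config = ARWorldTrackingConfiguration()
    config.userFaceTrackingEnabled = true  // front and rear cameras at once
    session.run(config)
}

/// Compose the phone's pose in the map with the head pose relative to the
/// phone, yielding a head-gaze ray in map (world) coordinates.
func headGazeRay(devicePoseInMap: simd_float4x4,
                 headPoseInDevice: simd_float4x4) -> Ray {
    let head = devicePoseInMap * headPoseInDevice
    let origin = simd_float3(head.columns.3.x, head.columns.3.y, head.columns.3.z)
    // Assumes the head frame's +Z axis points out of the face,
    // matching ARFaceAnchor's convention.
    let d = head.columns.2
    return Ray(origin: origin,
               direction: simd_normalize(simd_float3(d.x, d.y, d.z)))
}

/// Ray-rectangle test against a wall-mounted target. The 1.64 m x 0.94 m
/// default is the minimum size the abstract reports for avoiding erroneous
/// input; the center and axes are assumed to come from the known 3D map.
func hitsTarget(_ ray: Ray,
                center: simd_float3, normal: simd_float3,
                right: simd_float3, up: simd_float3,
                width: Float = 1.64, height: Float = 0.94) -> Bool {
    let denom = simd_dot(ray.direction, normal)
    guard abs(denom) > 1e-6 else { return false }   // gaze parallel to the wall
    let t = simd_dot(center - ray.origin, normal) / denom
    guard t > 0 else { return false }               // target is behind the user
    let p = ray.origin + t * ray.direction - center // hit point, wall-local
    return abs(simd_dot(p, right)) <= width / 2 && abs(simd_dot(p, up)) <= height / 2
}
```

The two-pose composition (`devicePoseInMap * headPoseInDevice`) mirrors the pipeline exactly as the abstract states it: head orientation from the front camera, device pose from the rear camera and depth sensor in a known map. The abstract evaluates head-gaze only; a finer-grained eye-gaze variant would substitute per-eye transforms for the head frame.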