{"updated":"2025-01-22T14:02:49.708028+00:00","metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00036908","sets":["1164:3027:3062:3063"]},"path":["3063"],"owner":"1","recid":"36908","title":["自然対話におけるジェスチャーの相互的関係の分析"],"pubdate":{"attribute_name":"公開日","attribute_value":"2003-01-30"},"_buckets":{"deposit":"d7e2219d-1ae4-4f54-b17f-00ab5281736c"},"_deposit":{"id":"36908","pid":{"type":"depid","value":"36908","revision_id":0},"owners":[1],"status":"published","created_by":1},"item_title":"自然対話におけるジェスチャーの相互的関係の分析","author_link":["0","0"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"自然対話におけるジェスチャーの相互的関係の分析"},{"subitem_title":"Analysis of Correlation between Interlocutors'Gestures in Spontaneous Speech","subitem_title_language":"en"}]},"item_type_id":"4","publish_date":"2003-01-30","item_4_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"千葉大学"},{"subitem_text_value":"千葉大学"},{"subitem_text_value":"千葉大学"}]},"item_4_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"Chiba University","subitem_text_language":"en"},{"subitem_text_value":"Chiba University","subitem_text_language":"en"},{"subitem_text_value":"Chiba University","subitem_text_language":"en"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"jpn"}]},"item_publisher":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/36908/files/IPSJ-HI02102007.pdf"},"date":[{"dateType":"Available","dateValue":"2005-01-30"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-HI02102007.pdf","filesize":[{"value":"490.8 kB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"660","billingrole":"5"},{"tax":["include_tax"],"price":"330","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"33"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"83bbe802-8d91-4a10-b75a-fdbf17f8c50b","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2003 by the Information Processing Society of Japan"}]},"item_4_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"前田, 真季子"},{"creatorName":"堀内, 靖雄"},{"creatorName":"市川, 熹"}],"nameIdentifiers":[{}]}]},"item_4_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Makiko, Maeda","creatorNameLang":"en"},{"creatorName":"Yasuo, Horiuchi","creatorNameLang":"en"},{"creatorName":"Akira, Ichikawa","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_4_source_id_9":{"attribute_name":"書誌レコードID","attribute_value_mlt":[{"subitem_source_identifier":"AA1221543X","subitem_source_identifier_type":"NCID"}]},"item_4_textarea_12":{"attribute_name":"Notice","attribute_value_mlt":[{"subitem_textarea_value":"SIG Technical Reports are nonrefereed and hence may later appear in any journals, conferences, symposia, etc."}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_18gh","resourcetype":"technical report"}]},"item_4_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"人は視線の動きやうなずきなどのジェスチャーを用いて、対話の円滑なやり取りを行なっている。自然対話は話者同士の音声情報、視覚情報を用いた相互作用によって進行していくものであるため、音声におけるあいづち現象などと同様に、ジェスチャー同士にも話者間に相互作用が生じていることが推測される。そこで、本論文では、特にうなずきに着目し、ジェスチャーによる相互作用を分析した。分析に用いたデータは、6組の親しい友人同士による対話であり、収録には正面映像を撮ることが可能な、2つのプロンプターを使用した。そして、その収録データを一般に公開されているアノテーションツール“ANVIL”を用いて、アノテートし、分析を行なった。分析の結果、うなずきは、あいづちと同様に相手話者の発話に対する何らかの応答動作として生じる場合よりも、自己発話内の方が多く生じる傾向が見られた。また、うなずきが二人の話者で同時に発生する現象が多いことも示唆された。","subitem_description_type":"Other"}]},"item_4_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"People use gestures like gaze and nod for smooth communication in dialogue. Usual dialogue continues exchanging interlocutor's information with each other using speech and gestures and therefore it is supposed that there is correlation between interlocutors' gestures as backchannels in speech. In this paper, we focused nods for the analyses of gestures. 18 dialogues by six pairs of good friends were recorded, where they can look at each other via two prompters. The prompter can record the interlocutors' gesture on videotape and project the partner's image through a half mirror. We annotated recorded dialogue by using the annotation tool \"ANVIL\" developed by Michael Kipp and the transcription tool developed by ours. As a result, it was suggested that gestures are caused more frequently when an interlocutor is speaking than listening, and interlocutors tend to nod simultaneously with considerable frequency.","subitem_description_type":"Other"}]},"item_4_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicPageEnd":"46","bibliographic_titles":[{"bibliographic_title":"情報処理学会研究報告ヒューマンコンピュータインタラクション(HCI)"}],"bibliographicPageStart":"39","bibliographicIssueDates":{"bibliographicIssueDate":"2003-01-30","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"9(2002-HI-102)","bibliographicVolumeNumber":"2002"}]},"relation_version_is_last":true,"weko_creator_id":"1"},"created":"2025-01-18T23:04:53.086736+00:00","id":36908,"links":{}}