{"updated":"2025-01-22T05:07:51.968532+00:00","metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00056095","sets":["1164:5064:5101:5103"]},"path":["5103"],"owner":"1","recid":"56095","title":["リアルタイム音楽情景記述システム:サビ区間検出手法"],"pubdate":{"attribute_name":"公開日","attribute_value":"2002-10-25"},"_buckets":{"deposit":"68968014-4040-4961-b120-b1622b3e0e88"},"_deposit":{"id":"56095","pid":{"type":"depid","value":"56095","revision_id":0},"owners":[1],"status":"published","created_by":1},"item_title":"リアルタイム音楽情景記述システム:サビ区間検出手法","author_link":["0","0"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"リアルタイム音楽情景記述システム:サビ区間検出手法"},{"subitem_title":"A Real - time Music Scene Description System : A Chorus - Section Detecting Method","subitem_title_language":"en"}]},"item_type_id":"4","publish_date":"2002-10-25","item_4_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"科学技術振興事業団さきがけ研究21「情報と知」領域/産業技術総合研究所"}]},"item_4_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"“Information and Human Activity,\" PRESTO, Japan Science and Technology Corporation (JST) / National Institute of Advanced Industrial Science and Technology (AIST)","subitem_text_language":"en"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"jpn"}]},"item_publisher":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/56095/files/IPSJ-MUS02047006.pdf"},"date":[{"dateType":"Available","dateValue":"2004-10-25"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-MUS02047006.pdf","filesize":[{"value":"203.7 kB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"660","billingrole":"5"},{"tax":["include_tax"],"price":"330","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"21"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"65c37f36-f0ca-4234-8b0d-c45cd7ed57d6","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2002 by the Information Processing Society of Japan"}]},"item_4_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"後藤, 真孝"}],"nameIdentifiers":[{}]}]},"item_4_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Masataka, Goto","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_4_source_id_9":{"attribute_name":"書誌レコードID","attribute_value_mlt":[{"subitem_source_identifier":"AN10438388","subitem_source_identifier_type":"NCID"}]},"item_4_textarea_12":{"attribute_name":"Notice","attribute_value_mlt":[{"subitem_textarea_value":"SIG Technical Reports are nonrefereed and hence may later appear in any journals, conferences, symposia, etc."}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_18gh","resourcetype":"technical report"}]},"item_4_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"本稿では、ポピュラー音楽の音響信号に対して、サビの区間の一覧を求める手法を提案する。従来、楽曲の音響信号中に何度も出現するサビのどこか一箇所を、指定した長さだけ切り出して提示する研究はあったが、サビ区間の開始点と終了点はわからず、サビの転調も扱えなかった。本手法は、様々な繰り返し区間の相互関係を調べることで、楽曲中で繰り返されるすべてのサビ区間を網羅的に検出し、それらの開始点と終了点を推定できる。また、転調後でも繰り返しと判断できる類似度を導入することで、転調を伴うサビも検出できる。この検出結果は、リアルタイム音楽情景記述システムにおける大局的な記述に相当する。RWC研究用音楽データベース100曲を用いて本手法を評価したところ、80曲のサビが検出できた。","subitem_description_type":"Other"}]},"item_4_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"This paper describes a method for obtaining a list of chorus sections in popular-music audio signals. Most previous methods detected a repeated section of a given length as a chorus and had difficulty in identifying both ends of a chorus section and in dealing with modulations (key changes). By analyzing relationships among various repeated sections, our method can detect all the repeated chorus sections in a song and estimate their both ends. It can also detect modulated chorus sections by introducing a similarity measure that enables correct judgement in finding modulated repetition. The detected results correspond to global music descriptions in our real-time music scene description system. Experimental results with the RWC Music Database showed that our method correctly dealt with 80 out of 100 songs.","subitem_description_type":"Other"}]},"item_4_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicPageEnd":"34","bibliographic_titles":[{"bibliographic_title":"情報処理学会研究報告音楽情報科学(MUS)"}],"bibliographicPageStart":"27","bibliographicIssueDates":{"bibliographicIssueDate":"2002-10-25","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"100(2002-MUS-047)","bibliographicVolumeNumber":"2002"}]},"relation_version_is_last":true,"weko_creator_id":"1"},"created":"2025-01-18T23:19:35.772230+00:00","id":56095,"links":{}}