{"created":"2025-01-19T01:14:55.102276+00:00","updated":"2025-01-19T16:54:31.476187+00:00","metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00214083","sets":["1164:4179:10535:10759"]},"path":["10759"],"owner":"44499","recid":"214083","title":["簡易小型化BERTを利用した日本語構文解析"],"pubdate":{"attribute_name":"公開日","attribute_value":"2021-11-24"},"_buckets":{"deposit":"5a79cc24-ea84-44db-9c63-99a4753d9d03"},"_deposit":{"id":"214083","pid":{"type":"depid","value":"214083","revision_id":0},"owners":[44499],"status":"published","created_by":44499},"item_title":"簡易小型化BERTを利用した日本語構文解析","author_link":["548567","548564","548566","548565"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"簡易小型化BERTを利用した日本語構文解析"},{"subitem_title":"Japanese parsing using simplified miniaturized BERT","subitem_title_language":"en"}]},"item_keyword":{"attribute_name":"キーワード","attribute_value_mlt":[{"subitem_subject":"深層学習モデル","subitem_subject_scheme":"Other"}]},"item_type_id":"4","publish_date":"2021-11-24","item_4_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"茨城大学大学院理工学研究科情報工学専攻"},{"subitem_text_value":"茨城大学大学院理工学研究科情報工学専攻"}]},"item_4_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"Ibaraki University","subitem_text_language":"en"},{"subitem_text_value":"Ibaraki University","subitem_text_language":"en"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"jpn"}]},"item_publisher":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/214083/files/IPSJ-NL21251020.pdf","label":"IPSJ-NL21251020.pdf"},"date":[{"dateType":"Available","dateValue":"2023-11-24"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-NL21251020.pdf","filesize":[{"value":"941.2 kB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"660","billingrole":"5"},{"tax":["include_tax"],"price":"330","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"23"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"4f6c92b3-6d3d-4d85-ad50-205920a2141e","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2021 by the Information Processing Society of Japan"}]},"item_4_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"河野, 慎司"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"新納, 浩幸"}],"nameIdentifiers":[{}]}]},"item_4_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Shinji, Kono","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Hiroyuki, Shinnou","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_4_source_id_9":{"attribute_name":"書誌レコードID","attribute_value_mlt":[{"subitem_source_identifier":"AN10115061","subitem_source_identifier_type":"NCID"}]},"item_4_textarea_12":{"attribute_name":"Notice","attribute_value_mlt":[{"subitem_textarea_value":"SIG Technical Reports are nonrefereed and hence may later appear in any journals, conferences, symposia, 
etc."}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_18gh","resourcetype":"technical report"}]},"item_4_source_id_11":{"attribute_name":"ISSN","attribute_value_mlt":[{"subitem_source_identifier":"2188-8779","subitem_source_identifier_type":"ISSN"}]},"item_4_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"BERT は fine-tuning することで様々な NLP タスクに対して高精度を出せる事前学習済みモデルであるが,fine-tuning には多くのパラメータを調整する必要があるため学習や推論に時間がかかるという問題がある.本論文では日本語構文解析に対して,BERT の一部の層を削除した簡易小型化 BERT の利用を提案する.実験では,京都大学ウェブ文書リードコーパスと京都大学テキストコーパスを混合したデータを用いて,京大版の BERT とそこから構築した簡易小型化 BERT の解析精度と処理時間を比較した.提案する簡易小型化 BERT では,京大版の BERT からの精度劣化をウェブコーパスで 0.2%,テキストコーパスで 0.79% に押さえながら,学習時間は 82%,推論時間はウェブコーパスで 66%,テキストコーパスで 84% まで削減することができた.","subitem_description_type":"Other"}]},"item_4_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicPageEnd":"5","bibliographic_titles":[{"bibliographic_title":"研究報告自然言語処理(NL)"}],"bibliographicPageStart":"1","bibliographicIssueDates":{"bibliographicIssueDate":"2021-11-24","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"20","bibliographicVolumeNumber":"2021-NL-251"}]},"relation_version_is_last":true,"weko_creator_id":"44499"},"id":214083,"links":{}}