{"updated":"2025-01-19T10:43:09.822427+00:00","metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00231557","sets":["581:11107:11121"]},"path":["11121"],"owner":"44499","recid":"231557","title":["正規分布に従う低次元特徴量とノイズ付与機構によるドメインシフト軽減のためのタスクヘッド"],"pubdate":{"attribute_name":"公開日","attribute_value":"2023-12-15"},"_buckets":{"deposit":"50df24c4-3740-4419-9434-3c2fa476ee67"},"_deposit":{"id":"231557","pid":{"type":"depid","value":"231557","revision_id":0},"owners":[44499],"status":"published","created_by":44499},"item_title":"正規分布に従う低次元特徴量とノイズ付与機構によるドメインシフト軽減のためのタスクヘッド","author_link":["625266","625269","625267","625268"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"正規分布に従う低次元特徴量とノイズ付与機構によるドメインシフト軽減のためのタスクヘッド"},{"subitem_title":"Task Head to Reduce Domain Shift with a Noise Imposition Mechanism and Low Dimensional Features Following a Normal Distribution","subitem_title_language":"en"}]},"item_keyword":{"attribute_name":"キーワード","attribute_value_mlt":[{"subitem_subject":"[一般論文] ドメイン適応,ドメインシフト,マルチタスク学習,特徴抽出,自然言語処理","subitem_subject_scheme":"Other"}]},"item_type_id":"2","publish_date":"2023-12-15","item_2_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"横浜国立大学大学院理工学府"},{"subitem_text_value":"横浜国立大学大学院工学研究院"}]},"item_2_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"Yokohama National University","subitem_text_language":"en"},{"subitem_text_value":"Graduate School of Engineering Science, Yokohama National University","subitem_text_language":"en"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"jpn"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/231557/files/IPSJ-JNL6412015.pdf","label":"IPSJ-JNL6412015.pdf"},"date":[{"dateType":"Available","dateValue":"2025-12-15"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-JNL6412015.pdf","filesize":[{"value":"1.7 MB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"660","billingrole":"5"},{"tax":["include_tax"],"price":"330","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"8"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"bfbc86d8-d8c1-4d50-a7b9-4507ffb24771","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2023 by the Information Processing Society of Japan"}]},"item_2_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"藤井, 巧朗"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"濱上, 知樹"}],"nameIdentifiers":[{}]}]},"item_2_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Takuro, Fujii","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Tomoki, Hamagami","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_2_source_id_9":{"attribute_name":"書誌レコードID","attribute_value_mlt":[{"subitem_source_identifier":"AN00116647","subitem_source_identifier_type":"NCID"}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_6501","resourcetype":"journal article"}]},"item_2_publisher_15":{"attribute_name":"公開者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"item_2_source_id_11":{"attribute_name":"ISSN","attribute_value_mlt":[{"subitem_source_identifier":"1882-7764","subitem_source_identifier_type":"ISSN"}]},"item_2_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"機械学習システムは訓練データとテストデータが同一の分布に従うものと仮定した状況下で動作する場合が多い.しかし,それらの分布は異なることが多く,ドメインシフトにより実用時に性能が低下してしまうという問題がある.本論文では,自然言語処理分野におけるドメインシフトの課題を解決するために,事前学習済み言語モデル(PLM)のFine-Tuningプロセスに着目した教師なしドメイン適応(UDA)に取り組んだ.本論文はPLMのFine-Tuningプロセスにおいて,正規分布に従う低次元の特徴量を獲得すると同時にノイズを付与するGaussian Layerを提案し,タスクヘッドに適用することでドメインシフトを軽減する.実験結果より,Gaussian Layerは特にソース・ターゲットドメイン距離が遠いより困難な設定で優位であることが確認された.また,分布整合分析より,Gaussian Layerは従来のUDA手法と比較してソース・ターゲットドメイン分布を整合することが確認でき,ドメイン不変な表現を獲得できることを示した.","subitem_description_type":"Other"}]},"item_2_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"In machine learning systems, it is often assumed that the train and test data follow the same distribution. However, they often follow different distributions, leading to a decrease in performance due to domain shift. In this paper, we study unsupervised domain adaptation (UDA) focusing on the fine-tuning process of pre-trained language models (PLM) to reduce domain shift in the natural language processing field. We propose Gaussian Layer which obtains low-dimensional features that follow a normal distribution and simultaneously adds noise, and apply it to the task head. From experimental results, we show that Gaussian Layer is superior to an existing UDA method in difficult settings where the distance between source and target domain is far apart. Additional analysis shows that Gaussian Layer can align source and target distributions better than an existing UDA method, indicating that it can obtain domain-invariant representations.","subitem_description_type":"Other"}]},"item_2_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicPageEnd":"1677","bibliographic_titles":[{"bibliographic_title":"情報処理学会論文誌"}],"bibliographicPageStart":"1668","bibliographicIssueDates":{"bibliographicIssueDate":"2023-12-15","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"12","bibliographicVolumeNumber":"64"}]},"relation_version_is_last":true,"item_2_identifier_registration":{"attribute_name":"ID登録","attribute_value_mlt":[{"subitem_identifier_reg_text":"10.20729/00231447","subitem_identifier_reg_type":"JaLC"}]},"weko_creator_id":"44499"},"created":"2025-01-19T01:31:55.768831+00:00","id":231557,"links":{}}