{"created":"2025-02-20T07:22:25.913093+00:00","metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:02000770","sets":["934:1022:11800:11801"]},"path":["11801"],"owner":"80578","recid":"2000770","title":["開発コスト軽減化のためのマルチタスク強化学習を用いた系列推薦システム"],"pubdate":{"attribute_name":"PubDate","attribute_value":"2025-01-28"},"_buckets":{"deposit":"1dce9bc5-9a1c-4fe7-a198-5f868a524dd1"},"_deposit":{"id":"2000770","pid":{"type":"depid","value":"2000770","revision_id":0},"owners":[80578],"status":"published","created_by":80578},"item_title":"開発コスト軽減化のためのマルチタスク強化学習を用いた系列推薦システム","author_link":[],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"開発コスト軽減化のためのマルチタスク強化学習を用いた系列推薦システム","subitem_title_language":"ja"},{"subitem_title":"Sequential Recommender System Using Multi-Task Reinforcement Learning to Reduce Development Costs","subitem_title_language":"en"}]},"item_keyword":{"attribute_name":"キーワード","attribute_value_mlt":[{"subitem_subject":"[研究論文] 推薦システム,強化学習,マルチタスク学習","subitem_subject_scheme":"Other"}]},"item_type_id":"3","publish_date":"2025-01-28","item_3_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"同志社大学大学院文化情報学研究科"},{"subitem_text_value":"同志社大学大学院文化情報学研究科"},{"subitem_text_value":"同志社大学大学院文化情報学研究科"}]},"item_3_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"Graduate School of Culture and Information Science, Doshisha University","subitem_text_language":"en"},{"subitem_text_value":"Graduate School of Culture and Information Science, Doshisha University","subitem_text_language":"en"},{"subitem_text_value":"Graduate School of Culture and Information Science, Doshisha 
University","subitem_text_language":"en"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"jpn"}]},"item_publisher":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/2000770/files/IPSJ-TOD1801006.pdf","label":"IPSJ-TOD1801006.pdf"},"date":[{"dateType":"Available","dateValue":"2027-01-28"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-TOD1801006.pdf","filesize":[{"value":"1.3 MB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"660","billingrole":"5"},{"tax":["include_tax"],"price":"330","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"13"},{"tax":["include_tax"],"price":"0","billingrole":"39"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"d9026eb0-eb38-4e82-904d-774cf5e3ee2f","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2025 by the Information Processing Society of Japan"}]},"item_3_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"洪,惠珍"}]},{"creatorNames":[{"creatorName":"木村,優介"}]},{"creatorNames":[{"creatorName":"波多野,賢治"}]}]},"item_3_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Hyejin Hong","creatorNameLang":"en"}]},{"creatorNames":[{"creatorName":"Yusuke Kimura","creatorNameLang":"en"}]},{"creatorNames":[{"creatorName":"Kenji 
Hatano","creatorNameLang":"en"}]}]},"item_3_source_id_9":{"attribute_name":"書誌レコードID","attribute_value_mlt":[{"subitem_source_identifier":"AA11464847","subitem_source_identifier_type":"NCID"}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_6501","resourcetype":"journal article"}]},"item_3_source_id_11":{"attribute_name":"ISSN","attribute_value_mlt":[{"subitem_source_identifier":"1882-7799","subitem_source_identifier_type":"ISSN"}]},"item_3_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"推薦システムは,インターネット上に存在する膨大なデータの中から各ユーザに必要な情報を提示することで,検索に要する時間や労力を減らすために利用されるものである.近年は,ユーザがとったこれまでの行動データから次の行動を予測するタスクである系列推薦が注目されているが,これには複数種の行動データを利用する必要がある.そのため,複数の行動データを同時に学習するマルチタスク学習を系列推薦システムに適用することで,タスク間の相乗効果によって推薦性能の向上が図られる.ECサイトにおける商品推薦では,強化学習アルゴリズムにマルチタスク学習を行う深層学習モデルを組み込んだ推薦システムが高い性能を出している.しかし,既存手法で扱うアルゴリズムや数式は当該ドメインに合わせた設計であるため,別ドメインへの適用には多大な開発コストがかかる.そこで本論文では,あらゆるドメインの系列データに対応可能なマルチタスク強化学習を用いた系列推薦システムを実現することで,より低い開発コストで系列推薦システムの開発を可能にする.","subitem_description_type":"Other"}]},"item_3_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"Recommender systems are used to reduce the time and effort required for searching valuable information. In recent years, sequential recommendation techniques predict a user's next action based on their previous behavior. Therefore, there is a growing focus on applying multi-task learning, which learns multiple types of behavioral data simultaneously, to sequential recommender systems, as this can improve recommendation performance through synergy between tasks. In product recommendations on e-commerce sites, recommender systems incorporating deep learning models that perform multi-task learning with reinforcement learning algorithms have achieved high performance. 
However, the algorithms and mathematical formulas used in existing methods are designed for the relevant domain, so applying them to a different domain would require significant development costs. Therefore, this paper aims to develop sequential recommender systems at a lower cost by realizing a sequential recommender system that uses multi-task reinforcement learning to handle sequence data from any domain.","subitem_description_type":"Other"}]},"item_3_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicPageEnd":"33","bibliographic_titles":[{"bibliographic_title":"情報処理学会論文誌データベース(TOD)"}],"bibliographicPageStart":"24","bibliographicIssueDates":{"bibliographicIssueDate":"2025-01-28","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"1","bibliographicVolumeNumber":"18"}]},"relation_version_is_last":true,"weko_creator_id":"80578"},"id":2000770,"updated":"2025-03-12T08:09:30.439191+00:00","links":{}}