{"metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00216177","sets":["6164:6165:9654:10851"]},"path":["10851"],"owner":"44499","recid":"216177","title":["Edge Domain Adaptation through Stepwise Cross-Domain Distillation"],"pubdate":{"attribute_name":"公開日","attribute_value":"2022-01-28"},"_buckets":{"deposit":"d1c377d9-4533-4fdf-91ef-460c4ea21b39"},"_deposit":{"id":"216177","pid":{"type":"depid","value":"216177","revision_id":0},"owners":[44499],"status":"published","created_by":44499},"item_title":"Edge Domain Adaptation through Stepwise Cross-Domain Distillation","author_link":["557807","557806","557808","557805"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"Edge Domain Adaptation through Stepwise Cross-Domain Distillation"},{"subitem_title":"Edge Domain Adaptation through Stepwise Cross-Domain Distillation","subitem_title_language":"en"}]},"item_type_id":"18","publish_date":"2022-01-28","item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"eng"}]},"item_18_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"Tokyo Institute of Technology"},{"subitem_text_value":"Tokyo Institute of Technology"}]},"item_18_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"Tokyo Institute of Technology","subitem_text_language":"en"},{"subitem_text_value":"Tokyo Institute of Technology","subitem_text_language":"en"}]},"item_publisher":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/216177/files/IPSJ-APRIS2021001.pdf","label":"IPSJ-APRIS2021001.pdf"},"date":[{"dateType":"Available","dateValue":"2024-01-28"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-APRIS2021001.pdf","filesize":[{"value":"2.3 MB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"0","billingrole":"5"},{"tax":["include_tax"],"price":"0","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"42"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"9cbcabcd-9b73-4734-b087-044d503cd21a","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2022 by the Information Processing Society of Japan"}]},"item_18_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Taisei, Yamana"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Yuko, Hara-Azumi"}],"nameIdentifiers":[{}]}]},"item_18_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Taisei, Yamana","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Yuko, Hara-Azumi","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_5794","resourcetype":"conference paper"}]},"item_18_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"Machine learning is now required to be built on embedded systems to realize edge-AI devices, where not only weight reduction but also accuracy degradation that stems from domain shift need to be addressed. 
This paper proposes Stepwise Cross-Domain Distillation (SCDD), which employs unsupervised domain adaptation for lightweight models. By distilling knowledge from a pre-domain-adapted large model in a stepwise manner through a teaching assistant model, the final lightweight student model can effectively achieve good accuracy in a target domain. We also provide insights obtained through quantitative evaluations to improve stepwise knowledge distillation under various domain shifts. Code is available at https://github.com/TaiseiYamana/SCDD.git","subitem_description_type":"Other"}]},"item_18_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"Machine learning models now need to be deployed on embedded systems to realize edge-AI devices, where not only weight reduction but also the accuracy degradation that stems from domain shift must be addressed. This paper proposes Stepwise Cross-Domain Distillation (SCDD), which employs unsupervised domain adaptation for lightweight models. By distilling knowledge from a pre-domain-adapted large model in a stepwise manner through a teaching assistant model, the final lightweight student model can effectively achieve good accuracy in a target domain. We also provide insights obtained through quantitative evaluations to improve stepwise knowledge distillation under various domain shifts. Code is available at https://github.com/TaiseiYamana/SCDD.git","subitem_description_type":"Other"}]},"item_18_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicPageEnd":"7","bibliographic_titles":[{"bibliographic_title":"Proceedings of Asia Pacific Conference on Robot IoT System Development and Platform"}],"bibliographicPageStart":"1","bibliographicIssueDates":{"bibliographicIssueDate":"2022-01-28","bibliographicIssueDateType":"Issued"},"bibliographicVolumeNumber":"2021"}]},"relation_version_is_last":true,"weko_creator_id":"44499"},"id":216177,"updated":"2025-01-19T15:54:06.357487+00:00","links":{},"created":"2025-01-19T01:16:53.351428+00:00"}