@inproceedings{oai:ipsj.ixsq.nii.ac.jp:00216177,
  author    = {Yamana, Taisei and Hara-Azumi, Yuko},
  title     = {Edge Domain Adaptation through Stepwise Cross-Domain Distillation},
  booktitle = {Proceedings of Asia Pacific Conference on Robot IoT System Development and Platform},
  volume    = {2021},
  pages     = {1--7},
  month     = jan,
  year      = {2022},
  publisher = {情報処理学会},
  url       = {https://github.com/TaiseiYamana/SCDD.git},
  abstract  = {Machine learning is now required to be built on embedded systems to realize edge-AI devices, where not only weight reduction but also accuracy degradation that stems from domain shift need to be addressed. This paper proposes Stepwise Cross-Domain Distillation (SCDD) that employs unsupervised domain adaptation for lightweight models. By distilling knowledge from a pre-domain-adapted large model stepwisely through a teaching assistant model, the final lightweight student model can effectively achieve good accuracy in a target domain. We also provide insights obtained through quantitative evaluations to improve stepwise knowledge distillation in various domain shifts.},
}