@inproceedings{oai:ipsj.ixsq.nii.ac.jp:00216195,
  author    = {Yokota, Naoya and Hara-Azumi, Yuko},
  title     = {Weight Exchange in Decentralized Distributed Machine Learning for Resource-Constrained {IoT} Edges},
  booktitle = {Proceedings of Asia Pacific Conference on Robot {IoT} System Development and Platform},
  volume    = {2021},
  pages     = {94--95},
  month     = jan,
  year      = {2022},
  publisher = {Information Processing Society of Japan},
  note      = {Although a Gossip Stochastic Gradient Descent (SGD) algorithm is known to be suitable for decentralized distributed machine learning, it has a non-convergence problem for heterogeneous datasets between multiple devices. In this paper, we propose a Gossip Swap SGD to address this problem by employing a weight swapping method between devices. Our evaluation demonstrated that our proposed method successfully improves higher accuracy without increasing computation load than the original Gossip SGD.},
}