@article{oai:ipsj.ixsq.nii.ac.jp:00217588,
 author = {Iwahana, Kazuki and Yanai, Naoto and Cruz, Jason Paul and Fujiwara, Toru},
 issue = {3},
 journal = {情報処理学会論文誌},
 month = {Mar},
 note = {Differential privacy and secure multiparty computation are the two primary approaches to privacy-preserving machine learning. However, the privacy guarantee of existing protocols that integrate the two approaches for collaborative learning weakens as more participants join. In this work, we present Secure and Private Gradient Computation (SPGC), a novel collaborative learning framework that provides a strong privacy guarantee independent of the number of participants while still achieving high accuracy. The main idea of SPGC is to generate the noise required for differential privacy within secure multiparty computation. We also implemented SPGC and used it in experiments to measure its accuracy and training time. The results show that SPGC is more accurate than a naive protocol based on local differential privacy by up to 5.6%. We experimentally show that the training time increases in proportion to the noise generation and, in addition to the accuracy evaluation, demonstrate that the privacy guarantee is independent of the number of participants.
------------------------------
This is a preprint of an article intended for publication in the Journal of
Information Processing (JIP). This preprint should not be cited. This
article should be cited as: Journal of Information Processing, Vol.30 (2022) (online).
DOI: http://dx.doi.org/10.2197/ipsjjip.30.209
------------------------------},
 title = {SPGC: Integration of Secure Multiparty Computation and Differential Privacy for Gradient Computation on Collaborative Learning},
 volume = {63},
 year = {2022}
}