@techreport{oai:ipsj.ixsq.nii.ac.jp:00211705,
  author        = {松原, 崇 and 宮武, 勇登 and 谷口, 隆晴},
  title         = {シンプレクティック数値積分法を用いた{Neural ODE} の学習},
  institution   = {{Information Processing Society of Japan}},
  number        = {2},
  month         = jun,
  year          = {2021},
  abstract      = {ニューラルネットワークで微分方程式を学習する neural ODE は,連続時間のダイナミカルシステムや確率分布を,高い精度でモデル化できる.しかし同じニューラルネットワークを何度も使うため,誤差逆伝播法で訓練するには非常に大きなメモリが必要になる.そのため数値積分で誤差逆伝播法を行う随伴法が用いられるが,数値誤差か大きな計算コストのどちらかが問題となる.本研究では随伴法に適切なチェックポイント法とシンプレクティック 数値積分法を用いることで,省メモリ性と速度を両立させる手法を提案する., A differential equation model using neural networks, neural ODE, enables use to model a continuous-time dynamics and probabilistic model with high accuracy. However, the neural ODE uses the same neural network repeatedly, the training using the backpropagation algorithm consumes large memory. Instead of the backpropagation algorithm, the adjoint method is commonly used, which obtains the gradient using the numerical integration. The adjoint method needs a small step size and much computational cost to suppress the numerical errors. In this study, we combine the checkpointing scheme and symplectic integrator for the adjoint method. It suppresses the memory consumption and functions faster.},
  affiliation   = {Graduate School of Engineering Science, Osaka University; Cybermedia Center, Osaka University; Graduate School of System Informatics, Kobe University},
  internal-note = {review: affiliations were exported into the author list by the OAI harvester and have been moved to the (ignored) affiliation field; institution inferred from the OAI key ipsj.ixsq.nii.ac.jp -- confirm against the original record. Original entry used issue=2; renamed to number for @techreport.},
}