@techreport{oai:ipsj.ixsq.nii.ac.jp:00234922,
  author        = {Nakamura, Takumi and Taneda, Shogo and Yamauchi, Yukari},
  author-ja     = {中村, 匠海 and 種田, 祥吾 and 山内, ゆかり},
  title         = {段階的に成長する{SparseNet}},
  institution   = {{Information Processing Society of Japan}},
  number        = {35},
  month         = jun,
  year          = {2024},
  note          = {Gao Huang らは,先行する全ての層の特徴マップを入力とする Dense Convolutional Network (DenseNet) を提案した.Wenqi Liu らは DenseNet の入力とする特徴マップ数を sparse 化し計算量を削減する,Sparse DenseNet (SparseNet)を提案した.さらに,各ブロックの層数を段階的に増加させることで,計算量の削減と精度の向上を達成した.これらのモデルでは,一層が新たに生み出すマップ数を成長率と呼んだ.本研究では,成長率を段階的に増加させ,path も増加していくようなネットワークにする.これにより SparseNet の更なる計算量削減と精度の向上を狙う., Gao Huang et al. proposed a Dense Convolutional Network (DenseNet) that takes the feature maps of all previous layers as input. Wenqi Liu et al. proposed Sparse DenseNet (SparseNet), which reduces the skip connections and number of feature maps from DenseNet. Furthermore, by increasing the number of layers in each block in stages, SparseNet achieved a reduction of the computation costs and improved accuracy. In these models, the number of new maps produced by a layer was called the growth rate. In this research, we propose a network model in which the growth rate and the number of paths increases gradually. Through this, we aim to further reduce calculation and improve the accuracy of SparseNet.},
  internal-note = {Cleanup: deduplicated author list (romanized form kept in `author`, Japanese forms moved to the ignored `author-ja` field); fixed "Nnakamura" typo and reversed name order; `issue` renamed to the standard `number`; `month` switched to the `jun` macro; braced {SparseNet} in title. `institution` inferred from the IPSJ OAI repository identifier in the citation key -- verify against the original record.},
}