@techreport{oai:ipsj.ixsq.nii.ac.jp:00077342,
  author        = {Kimura, Keigo and Yoshida, Tetsuya},
  author-ja     = {木村, 圭吾 and 吉田, 哲也},
  title         = {特徴表現のスパース性を考慮した{NMF}},
  institution   = {{Information Processing Society of Japan (IPSJ)}},
  number        = {2},
  month         = sep,
  year          = {2011},
  abstract      = {本稿では,特徴表現のスパース制約を考慮した NMF (Non-negative Matrix Factorization) を提案する.近年,要素が非負である実行列を,同じく要素が非負である実行列の積として表現する非負値行列分解 (NMF) が注目を集めている.従来の研究では NMF における非負性制約が非零の要素が少ないスパースな特徴表現の学習に寄与すると考えられ,またスパース制約を導入した手法も提案されているが,これまで特徴表現のスパース性は明示的には考慮されてこなかった.本稿では NMF における特徴表現に着目し,特徴表現のスパース性を独立性と相関から定式化し,定式化したスパース性を正則化項として活用する手法を提案する.提案法を文書クラスタリングに適用し,従来法との比較を通じて提案法の有効性を示す., We propose an approach for Non-negative Matrix Factorization (NMF) with sparseness constraints on features. It has been believed that the non-negativity constraint in NMF contributes to making the learned features sparse. In addition, several approaches incorporated additional sparseness constraints, by hoping that the constraints make the features more sparse. However, previous approaches have mostly focused on coefficients, and have not considered the sparsity of features explicitly. Our approach explicitly incorporates the sparsity of features, in terms of independence of features and correlation of features. The proposed notion of sparsity is formalized as regularization terms in the framework of NMF, and learning algorithms with multiplicative update rules are proposed. The proposed approach is evaluated in terms of document clustering over well-known benchmark datasets. Several experiments have been conducted on the datasets, and comparison with other state-of-the-art NMF algorithms is reported. The results are encouraging and show that the proposed approach improves the clustering performance, while sustaining relatively good quality of data approximation.},
  internal-note = {institution derived from OAI repository key (ipsj.ixsq.nii.ac.jp); Japanese-script author names preserved in ignored field author-ja},
}