@techreport{oai:ipsj.ixsq.nii.ac.jp:00201552,
  author        = {Feng, Jiaqi and Iwaihara, Mizuho},
  title         = {Weakly Supervised Multi-Label Text Classification},
  number        = {7},
  month         = dec,
  year          = {2019},
  abstract      = {Multi-label text classification is a variant of the classification problem where each document is associated with multiple labels, while traditional text classification aims to assign a single label to each document. There exist some previous studies on this problem that have made remarkable achievements, however, existing methods suffer from the lack of labeled data and tend to ignore the interdependencies among labels. In this paper, we propose a method to improve the performance of multilabel text classification given a small set of labeled data. Our method utilizes both the labeled data and unlabeled data to exploit coarse correlations between labels and generates pseudo documents to make up for the deficiency of labeled data, which are used to pre-train our model. Meanwhile, we apply a hierarchical attentional sequence generation model to capture the correlations between labels at a finer granularity, which will be finetuned via self-training. Experiments show the improvement of our method when given weak supervisions.},
  internal-note = {NOTE(review): required field `institution` missing from source record; OAI key suggests IPSJ (Information Processing Society of Japan) SIG technical report -- confirm and add. Authors were listed twice and with given/family names swapped in the original export; deduplicated and reordered as Feng, Jiaqi / Iwaihara, Mizuho -- verify against the published report.},
}