{"updated":"2025-01-19T07:34:42.359148+00:00","metadata":{"_oai":{"id":"oai:ipsj.ixsq.nii.ac.jp:00241690","sets":["1164:1579:11464:11813"]},"path":["11813"],"owner":"44499","recid":"241690","title":["A survey of sparse structures in the multi-layer perceptron of large language models"],"pubdate":{"attribute_name":"公開日","attribute_value":"2024-12-09"},"_buckets":{"deposit":"359c2f22-b78c-4941-a6fa-66bfda3db806"},"_deposit":{"id":"241690","pid":{"type":"depid","value":"241690","revision_id":0},"owners":[44499],"status":"published","created_by":44499},"item_title":"A survey of sparse structures in the multi-layer perceptron of large language models","author_link":["665818","665816","665819","665815","665820","665817","665822","665821"],"item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"A survey of sparse structures in the multi-layer perceptron of large language models"},{"subitem_title":"A survey of sparse structures in the multi-layer perceptron of large language models","subitem_title_language":"en"}]},"item_keyword":{"attribute_name":"キーワード","attribute_value_mlt":[{"subitem_subject":"省電力","subitem_subject_scheme":"Other"}]},"item_type_id":"4","publish_date":"2024-12-09","item_4_text_3":{"attribute_name":"著者所属","attribute_value_mlt":[{"subitem_text_value":"Fujitsu Ltd."},{"subitem_text_value":"University of Kyushu"},{"subitem_text_value":"Fujitsu Ltd."},{"subitem_text_value":"Fujitsu Ltd."}]},"item_4_text_4":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_value":"Fujitsu Ltd.","subitem_text_language":"en"},{"subitem_text_value":"University of Kyushu","subitem_text_language":"en"},{"subitem_text_value":"Fujitsu Ltd.","subitem_text_language":"en"},{"subitem_text_value":"Fujitsu Ltd.","subitem_text_language":"en"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"eng"}]},"item_publisher":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"情報処理学会","subitem_publisher_language":"ja"}]},"publish_status":"0","weko_shared_id":-1,"item_file_price":{"attribute_name":"Billing file","attribute_type":"file","attribute_value_mlt":[{"url":{"url":"https://ipsj.ixsq.nii.ac.jp/record/241690/files/IPSJ-ARC24259025.pdf","label":"IPSJ-ARC24259025.pdf"},"date":[{"dateType":"Available","dateValue":"2026-12-09"}],"format":"application/pdf","billing":["billing_file"],"filename":"IPSJ-ARC24259025.pdf","filesize":[{"value":"1.0 MB"}],"mimetype":"application/pdf","priceinfo":[{"tax":["include_tax"],"price":"660","billingrole":"5"},{"tax":["include_tax"],"price":"330","billingrole":"6"},{"tax":["include_tax"],"price":"0","billingrole":"16"},{"tax":["include_tax"],"price":"0","billingrole":"44"}],"accessrole":"open_date","version_id":"a2e073b1-d0fe-4f66-a308-15cb49166919","displaytype":"detail","licensetype":"license_note","license_note":"Copyright (c) 2024 by the Information Processing Society of Japan"}]},"item_4_creator_5":{"attribute_name":"著者名","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Sameer, Deshmukh"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Mingchuan, Lyu"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Hiroki, Tokura"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Takumi, Honda"}],"nameIdentifiers":[{}]}]},"item_4_creator_6":{"attribute_name":"著者名(英)","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Sameer, 
Deshmukh","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Mingchuan, Lyu","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Hiroki, Tokura","creatorNameLang":"en"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Takumi, Honda","creatorNameLang":"en"}],"nameIdentifiers":[{}]}]},"item_4_source_id_9":{"attribute_name":"書誌レコードID","attribute_value_mlt":[{"subitem_source_identifier":"AN10096105","subitem_source_identifier_type":"NCID"}]},"item_4_textarea_12":{"attribute_name":"Notice","attribute_value_mlt":[{"subitem_textarea_value":"SIG Technical Reports are nonrefereed and hence may later appear in any journals, conferences, symposia, etc."}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourceuri":"http://purl.org/coar/resource_type/c_18gh","resourcetype":"technical report"}]},"item_4_source_id_11":{"attribute_name":"ISSN","attribute_value_mlt":[{"subitem_source_identifier":"2188-8574","subitem_source_identifier_type":"ISSN"}]},"item_4_description_7":{"attribute_name":"論文抄録","attribute_value_mlt":[{"subitem_description":"Large language models using the transformer architecture require massive computational resources for training to acceptable levels of accuracy. Recent advances have shown that the MLP layers within such models can be pruned to up to 90% sparsity to reduce the computational requirement of training and inference. However, achieving high performance for the sparse matrix multiplication remains a challenge on GPUs. Several approaches have been suggested for improving the performance of sparse matrix multiplication using structured sparsity. In this paper, we first survey and benchmark some of the sparsity structures that can be applied to dense matrices, and then examine the training loss curves of a 162M Mistral model using various structures of sparsity. Our results show promising future directions for research in improving the training time of transformers using sparsity.","subitem_description_type":"Other"}]},"item_4_description_8":{"attribute_name":"論文抄録(英)","attribute_value_mlt":[{"subitem_description":"Large language models using the transformer architecture require massive computational resources for training to acceptable levels of accuracy. Recent advances have shown that the MLP layers within such models can be pruned to up to 90% sparsity to reduce the computational requirement of training and inference. However, achieving high performance for the sparse matrix multiplication remains a challenge on GPUs. Several approaches have been suggested for improving the performance of sparse matrix multiplication using structured sparsity. In this paper, we first survey and benchmark some of the sparsity structures that can be applied to dense matrices, and then examine the training loss curves of a 162M Mistral model using various structures of sparsity. 
Our results show promising future directions for research in improving the training time of transformers using sparsity.","subitem_description_type":"Other"}]},"item_4_biblio_info_10":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicPageEnd":"6","bibliographic_titles":[{"bibliographic_title":"研究報告システム・アーキテクチャ(ARC)"}],"bibliographicPageStart":"1","bibliographicIssueDates":{"bibliographicIssueDate":"2024-12-09","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"25","bibliographicVolumeNumber":"2024-ARC-259"}]},"relation_version_is_last":true,"weko_creator_id":"44499"},"created":"2025-01-19T01:46:26.621567+00:00","id":241690,"links":{}}
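To make the notion of structured sparsity in the abstract concrete, below is a minimal PyTorch sketch (not taken from the report) of magnitude-based n:m pruning, the pattern behind NVIDIA's 2:4 sparse tensor cores: within each contiguous group of m weights, only the n largest-magnitude entries are kept. The function name nm_prune and the magnitude-based selection rule are illustrative assumptions; the report itself surveys several such structures.

```python
import torch

def nm_prune(weight: torch.Tensor, n: int = 2, m: int = 4) -> torch.Tensor:
    """Keep only the n largest-magnitude entries in every contiguous
    group of m elements along the last dimension (n=2, m=4 yields the
    2:4 pattern accelerated by NVIDIA sparse tensor cores)."""
    rows, cols = weight.shape
    assert cols % m == 0, "width must be divisible by the group size m"
    groups = weight.reshape(rows, cols // m, m)
    # Indices of the (m - n) smallest-magnitude entries in each group.
    _, drop = groups.abs().topk(m - n, dim=-1, largest=False)
    mask = torch.ones_like(groups, dtype=torch.bool)
    mask.scatter_(-1, drop, False)  # zero out the dropped positions
    return (groups * mask).reshape(rows, cols)

# Example: prune a small MLP weight matrix to 2:4 (50%) sparsity.
w = torch.randn(8, 16)
w_sparse = nm_prune(w, n=2, m=4)
assert (w_sparse != 0).sum() <= w.numel() // 2
```

Note that 2:4 corresponds to only 50% sparsity; the much higher ratios discussed in the abstract (up to 90%) would require other n:m ratios or coarser structures such as block sparsity, which is presumably part of what the survey compares.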