@article{IMM2008-04658,
  author   = {M{\o}rup, M. and Hansen, L. K. and Arnfred, S. M.},
  title    = {Algorithms for Sparse Non-negative {TUCKER}},
  journal  = {Neural Computation},
  year     = {2008},
  month    = aug,
  volume   = {20},
  number   = {8},
  pages    = {2112--2131},
  keywords = {Tucker, {PARAFAC}, Sparse coding, Higher Order Non-negative Matrix Factorization ({HONMF})},
  url      = {http://www2.compute.dtu.dk/pubdb/pubs/4658-full.html},
  abstract = {The analysis of large scale data of more modalities than two, i.e. tensor, has lately become a field of growing attention. To analyze such data, decomposition techniques are widely used. The two most common decompositions for tensors are the {TUCKER} model and the more restricted {PARAFAC} model. Both models can be viewed as generalizations of the regular factor analysis to data of more than two modalities. Non-negative matrix factorization, (NMF), in conjunction with sparse coding has lately been given much attention due to its part based and easy interpretable representation. While {NMF} has been extended to the {PARAFAC} model no such attempt has been done to extend {NMF} to the {TUCKER} model. However, if the tensor data analyzed is non-negative it may well be relevant to consider purely additive, i.e. non-negative {TUCKER} decompositions. To reduce ambiguities of this type of decomposition we develop updates that can impose sparseness in any combination of modalities. Hence, form algorithms for sparse non-negative {TUCKER} decompositions, (SN-TUCKER). We demonstrate how the proposed algorithms are superior to existing algorithms for {TUCKER} decompositions when indeed the data and interactions can be considered non-negative. We further illustrate how sparse coding can help identify what model, i.e. {PARAFAC} or {TUCKER}, is the most appropriate to the data as well as to select the number of components by turning off excess components. The algorithms for {SN-TUCKER} are available.},
}