@mastersthesis{IMM2007-05217,
  author   = {Arngren, M.},
  title    = {Modelling Cognitive Representations},
  year     = {2007},
  keywords = {Cognitive Component Analysis, Generative Models, Matrix Factorization, Mixture Models, {EM}-algorithm, Deep Network models},
  school   = {Informatics and Mathematical Modelling, Technical University of Denmark, {DTU}},
  address  = {Richard Petersens Plads, Building 321, {DK}-2800 Kgs. Lyngby},
  note     = {Supervised by Assoc. Prof. Ole Winther, {IMM}, {DTU}.},
  url      = {http://www2.compute.dtu.dk/pubdb/pubs/5217-full.html},
  abstract = {This thesis analyzes the modelling of visual cognitive representations based on extracting cognitive components from the {MNIST} dataset of handwritten digits using simple unsupervised linear and non-linear matrix factorizations both non-negative and unconstrained based on gradient descent learning. We introduce two different classes of generative models for modelling the cognitive data: Mixture Models and Deep Network Models. Mixture models based on {K}-Means, Gaussian and factor analyzer kernel functions are presented as simple generative models in a general framework. From simulations we analyze the generative properties of these models and show how they render insufficient to properly model the complex distribution of the visual cognitive data. Motivated by the introduction of deep belief nets by Hinton et al. [12] we propose a simpler generative deep network model based on cognitive components. A theoretical framework is presented as individual modules for building a generative hierarchical network model. We analyze the performance in terms of classification and generation of {MNIST} digits and show how our simplifications compared to Hinton et al. [12] leads to degraded performance. In this respect we outline the differences and conjecture obvious improvements.},
}