@TECHREPORT{IMM2002-01261,
  author      = "J. Quiñonero-Candela",
  title       = "Extended Linear Models with Gaussian Priors",
  year        = "2002",
  keywords    = "linear models, Bayes, Gaussian processes, Relevance Vector Machine, Expectation-Maximization algorithm",
  institution = "Informatics and Mathematical Modelling, Technical University of Denmark",
  address     = "Richard Petersens Plads, Building 321, {DK-}2800 Kongens Lyngby, Denmark",
  url         = "http://www2.compute.dtu.dk/pubdb/pubs/1261-full.html",
  abstract    = "In extended linear models the input space is projected onto a feature space by means of an arbitrary non-linear transformation. A linear model is then applied to the feature space to construct the model output. The dimension of the feature space can be very large, or even infinite, giving the model great flexibility. Support Vector Machines (SVMs) and Gaussian processes are two examples of such models. In this technical report I present a model in which the dimension of the feature space remains finite, and where a Bayesian approach with Gaussian priors on the parameters is used to train the model. The Relevance Vector Machine, introduced by Tipping, is a particular case of such a model. I give the detailed derivations of the expectation-maximisation (EM) algorithm used in training. These derivations are not found in the literature and might be helpful to newcomers."
}
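
Below is a minimal numpy sketch of the kind of model the abstract describes: a linear model over a finite set of non-linear basis functions, a Gaussian prior on the weights, and RVM-style EM updates for the hyperparameters. It is an illustration under stated assumptions, not the report's exact derivation; the RBF basis centred on the training inputs, the toy sinc data, and all function names are assumptions introduced here.

# Sketch of a Bayesian extended linear model y(x) = w^T phi(x) with a Gaussian
# prior on w and EM re-estimation of the hyperparameters (RVM-style), assumed
# to illustrate the setting of the report rather than reproduce its derivations.
import numpy as np

def rbf_features(X, centres, width=1.0):
    # Project the inputs onto a finite-dimensional feature space via RBF basis functions.
    d2 = ((X[:, None, :] - centres[None, :, :]) ** 2).sum(-1)
    return np.exp(-d2 / (2.0 * width ** 2))

def fit_em(Phi, t, n_iter=100):
    # EM updates for per-weight prior precisions alpha and noise precision beta.
    N, M = Phi.shape
    alpha = np.ones(M)   # one Gaussian prior precision per weight
    beta = 1.0           # noise precision
    for _ in range(n_iter):
        # E-step: Gaussian posterior over the weights given alpha and beta.
        Sigma = np.linalg.inv(np.diag(alpha) + beta * Phi.T @ Phi)
        mu = beta * Sigma @ Phi.T @ t
        # M-step: standard EM re-estimates of the hyperparameters.
        alpha = 1.0 / (mu ** 2 + np.diag(Sigma))
        resid = t - Phi @ mu
        beta = N / (resid @ resid + np.trace(Phi @ Sigma @ Phi.T))
    return mu, Sigma, alpha, beta

# Toy usage: noisy sinc data with RBF features centred on the training inputs.
rng = np.random.default_rng(0)
X = rng.uniform(-10, 10, size=(50, 1))
t = np.sinc(X[:, 0] / np.pi) + 0.1 * rng.standard_normal(50)
Phi = rbf_features(X, X, width=2.0)
mu, Sigma, alpha, beta = fit_em(Phi, t)

In this sketch the E-step computes the exact Gaussian posterior over the weights, and the M-step uses the posterior moments to update the prior and noise precisions; large alpha values effectively prune the corresponding basis functions, which is the mechanism behind the sparsity of the Relevance Vector Machine.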