@inproceedings{IMM2002-01259,
  author    = {Qui{\~n}onero-Candela, J. and Winther, O.},
  title     = {Incremental {Gaussian} Processes},
  booktitle = {Advances in Neural Information Processing Systems},
  year      = {2002},
  keywords  = {Gaussian Processes, Incremental Methods, Bayesian Kernel Methods, Mean Field Classification, Computational Complexity},
  url       = {http://www2.compute.dtu.dk/pubdb/pubs/1259-full.html},
  abstract  = {In this paper, we consider Tipping's relevance vector machine (RVM) and formalize an incremental training strategy as a variant of the expectation-maximization (EM) algorithm that we call subspace {EM}. Working with a subset of active basis functions, the sparsity of the {RVM} solution will ensure that the number of basis functions and thereby the computational complexity is kept low. We also introduce a mean field approach to the intractable classification model that is expected to give a very good approximation to exact Bayesian inference and contains the Laplace approximation as a special case. We test the algorithms on two large data sets with $10^3$--$10^4$ examples. The results indicate that Bayesian learning of large data sets, e.g. the {MNIST} database is realistic.},
}