@CONFERENCE{IMM2000-04834,
  author    = "L. K. Hansen",
  title     = "Bayesian Averaging is Well-Temperated",
  year      = "2000",
  pages     = "265--271",
  booktitle = "Advances in Neural Information Processing Systems 1999",
  editor    = "S. Solla and others",
  publisher = "{MIT} Press",
  url       = "http://www2.compute.dtu.dk/pubdb/pubs/4834-full.html",
  abstract  = "Bayesian predictions are stochastic, just like the predictions of any other inference scheme that generalizes from a finite sample. While a simple variational argument shows that Bayes averaging is generalization optimal given that the prior matches the teacher parameter distribution, the situation is less clear if the teacher distribution is unknown. I define a class of averaging procedures, the temperated likelihoods, including both Bayes averaging with a uniform prior and maximum likelihood estimation as special cases. I show that Bayes is generalization optimal in this family for any teacher distribution for two learning problems that are analytically tractable: learning the mean of a Gaussian and the asymptotics of smooth learners."
}
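% A minimal mathematical sketch of the "temperated likelihoods" family named in
% the abstract (notation assumed here, not taken from the entry): predictions
% are averaged under the tempered posterior
%   p_T(\theta \mid D) \propto p(D \mid \theta)^{1/T} \, p(\theta),
% so that T = 1 with a uniform prior p(\theta) recovers Bayes averaging, while
% the limit T \to 0 concentrates the average on the maximum-likelihood estimate.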