@article{IMM2015-06270,
  author    = {Vestergaard, J. S. and Nielsen, A. A.},
  title     = {Canonical Information Analysis},
  journal   = {{ISPRS} Journal of Photogrammetry and Remote Sensing},
  year      = {2015},
  month     = jan,
  volume    = {101},
  pages     = {1--9},
  publisher = {Elsevier},
  doi       = {10.1016/j.isprsjprs.2014.11.002},
  url       = {http://authors.elsevier.com/a/1QAnN3I9x1EeMt},
  note      = {Matlab code at https://github.com/schackv/cia},
  keywords  = {Information theory, probability density function estimation, Parzen windows, entropy, mutual information maximization, canonical mutual information analysis, {CIA}, approximate entropy},
  abstract  = {Canonical correlation analysis is an established multivariate statistical method in which correlation between linear combinations of multivariate sets of variables is maximized. In canonical information analysis introduced here, linear correlation as a measure of association between variables is replaced by the information theoretical, entropy based measure mutual information, which is a much more general measure of association. We make canonical information analysis feasible for large sample problems, including for example multispectral images, due to the use of a fast kernel density estimator for entropy estimation. Canonical information analysis is applied successfully to 1) simple simulated data to illustrate the basic idea and evaluate performance, 2) fusion of weather radar and optical geostationary satellite data in a situation with heavy precipitation, and 3) change detection in optical airborne data. The simulation study shows that canonical information analysis is as accurate as and much faster than algorithms presented in previous work, especially for large sample sizes.},
}