@INPROCEEDINGS{DetetabilityLoss_IJCAI_2016,
  author    = {Alippi, Cesare and Boracchi, Giacomo and Carrera, Diego and Roveri, Manuel},
  title     = {Change Detection in Multivariate Datastreams: Likelihood and Detectability Loss},
  booktitle = {Proceedings of the 25th International Joint Conference on Artificial Intelligence (IJCAI 2016)},
  year      = {2016},
  month     = {July},
  pages     = {1368--1374},
  url       = {http://www.ijcai.org/Proceedings/16/Papers/197.pdf},
  note      = {http://arxiv.org/abs/1510.04850},
  abstract  = {We address the problem of detecting changes in multivariate datastreams, and we investigate the intrinsic difficulty that change-detection methods face when the data dimension scales. In particular, we consider a general approach where changes are detected by comparing the distribution of the log-likelihood of the datastream over different time windows. Although this approach constitutes the framework of several change-detection methods, its effectiveness when the data dimension scales has never been investigated, which is indeed the goal of our paper. We show that the magnitude of the change can be naturally measured by the symmetric Kullback-Leibler divergence between the pre- and post-change distributions, and that the detectability of a change of a given magnitude worsens when the data dimension increases. This problem, which we refer to as \emph{detectability loss}, is due to the linear relationship between the variance of the log-likelihood and the data dimension. We analytically derive the detectability loss on Gaussian-distributed datastreams, and empirically demonstrate that this problem also holds on real-world datasets and that it can be harmful even at low data dimensions (say, 10).}
}
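% A minimal worked sketch (not part of the published entry, kept as a comment so
% the .bib file still parses) of the linear variance scaling the abstract refers
% to, assuming a stationary d-dimensional Gaussian datastream with mean \mu and
% covariance \Sigma; the notation \mathcal{L} for the log-likelihood is ours:
%
% \[
%   \mathcal{L}(x) = \log \phi_{\mu,\Sigma}(x)
%     = -\tfrac{d}{2}\log(2\pi) - \tfrac{1}{2}\log|\Sigma|
%       - \tfrac{1}{2}(x-\mu)^\top \Sigma^{-1} (x-\mu),
% \]
% and since $(x-\mu)^\top \Sigma^{-1} (x-\mu) \sim \chi^2_d$ with variance $2d$,
% \[
%   \operatorname{Var}[\mathcal{L}(x)] = \tfrac{1}{4}\operatorname{Var}[\chi^2_d] = \tfrac{d}{2},
% \]
% i.e. the log-likelihood variance grows linearly with d, which is why a change
% of fixed symmetric Kullback-Leibler magnitude becomes harder to detect as the
% data dimension increases.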