2014
Taborda, Camilo G; Perez-Cruz, Fernando; Guo, Dongning
New Information-Estimation Results for Poisson, Binomial and Negative Binomial Models Proceedings Article
In: 2014 IEEE International Symposium on Information Theory, pp. 2207–2211, IEEE, Honolulu, 2014, ISBN: 978-1-4799-5186-4.
Abstract | Links | BibTeX | Tags: Bregman divergence, Estimation, estimation measures, Gaussian models, Gaussian processes, information measures, information theory, information-estimation results, negative binomial models, Poisson models, Stochastic processes
@inproceedings{Taborda2014,
title = {New Information-Estimation Results for Poisson, Binomial and Negative Binomial Models},
author = {Camilo G Taborda and Fernando Perez-Cruz and Dongning Guo},
url = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=6875225},
doi = {10.1109/ISIT.2014.6875225},
isbn = {978-1-4799-5186-4},
year = {2014},
date = {2014-06-01},
booktitle = {2014 IEEE International Symposium on Information Theory},
pages = {2207--2211},
publisher = {IEEE},
address = {Honolulu},
abstract = {In recent years, a number of mathematical relationships have been established between information measures and estimation measures for various models, including Gaussian, Poisson and binomial models. In this paper, it is shown that the second derivative of the input-output mutual information with respect to the input scaling can be expressed as the expectation of a certain Bregman divergence pertaining to the conditional expectations of the input and the input power. This result is similar to that found for the Gaussian model where the Bregman divergence therein is the square distance. In addition, the Poisson, binomial and negative binomial models are shown to be similar in the small scaling regime in the sense that the derivative of the mutual information and the derivative of the relative entropy converge to the same value.},
keywords = {Bregman divergence, Estimation, estimation measures, Gaussian models, Gaussian processes, information measures, information theory, information-estimation results, negative binomial models, Poisson models, Stochastic processes},
pubstate = {published},
tppubtype = {inproceedings}
}
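For orientation, the quantity named in the abstract is a Bregman divergence; the display below is the standard definition (my own gloss, not the paper's exact statement), which also shows why the Gaussian case reduces to the squared distance mentioned in the abstract.

% Standard definition of the Bregman divergence generated by a convex function \varphi
% (illustrative notation; not copied from the paper).
\[
  D_\varphi(x, y) \;=\; \varphi(x) - \varphi(y) - \varphi'(y)\,(x - y).
\]
% For \varphi(u) = u^2 this collapses to the squared distance of the Gaussian model:
\[
  D_{u^2}(x, y) \;=\; x^2 - y^2 - 2y\,(x - y) \;=\; (x - y)^2 .
\]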
2008
Perez-Cruz, Fernando
Kullback-Leibler Divergence Estimation of Continuous Distributions Proceedings Article
In: 2008 IEEE International Symposium on Information Theory, pp. 1666–1670, IEEE, Toronto, 2008, ISBN: 978-1-4244-2256-2.
Abstract | Links | BibTeX | Tags: Convergence, density estimation, Density measurement, Entropy, Frequency estimation, H infinity control, information theory, k-nearest-neighbour density estimation, Kullback-Leibler divergence estimation, Machine learning, Mutual information, neuroscience, Random variables, statistical distributions, waiting-times distributions
@inproceedings{Perez-Cruz2008,
title = {Kullback-Leibler Divergence Estimation of Continuous Distributions},
author = {Fernando Perez-Cruz},
url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4595271},
isbn = {978-1-4244-2256-2},
year = {2008},
date = {2008-01-01},
booktitle = {2008 IEEE International Symposium on Information Theory},
pages = {1666--1670},
publisher = {IEEE},
address = {Toronto},
abstract = {We present a method for estimating the KL divergence between continuous densities and we prove that it converges almost surely. Divergence estimation is typically solved by estimating the densities first. Our main result shows that this intermediate step is unnecessary and that the divergence can be estimated using either the empirical CDF or k-nearest-neighbour density estimation, which does not converge to the true measure for finite k. The convergence proof is based on describing the statistics of our estimator using waiting-times distributions, such as the exponential or Erlang. We illustrate the proposed estimators, show how they compare to existing methods based on density estimation, and outline how our divergence estimators can be used for solving the two-sample problem.},
keywords = {Convergence, density estimation, Density measurement, Entropy, Frequency estimation, H infinity control, information theory, k-nearest-neighbour density estimation, Kullback-Leibler divergence estimation, Machine learning, Mutual information, neuroscience, Random variables, statistical distributions, waiting-times distributions},
pubstate = {published},
tppubtype = {inproceedings}
}
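To make the estimator described in the abstract concrete, here is a minimal Python sketch of a k-nearest-neighbour KL divergence estimator of that kind; the function name, the use of scipy's cKDTree, and the default k=1 are illustrative choices of mine, not details taken from the paper.

import numpy as np
from scipy.spatial import cKDTree

def knn_kl_divergence(x, y, k=1):
    """Estimate D(P||Q) from samples x ~ P (shape (n, d)) and y ~ Q (shape (m, d))."""
    x, y = np.atleast_2d(x), np.atleast_2d(y)
    n, d = x.shape
    m, _ = y.shape
    # Distance from each x_i to its k-th nearest neighbour among the remaining x's
    # (query k+1 neighbours because the nearest one is the point itself).
    rho = cKDTree(x).query(x, k=k + 1)[0][:, -1]
    # Distance from each x_i to its k-th nearest neighbour among the y's.
    nu = cKDTree(y).query(x, k=k)[0]
    if k > 1:
        nu = nu[:, -1]
    # Plug-in estimate; duplicate sample points make rho zero, which this
    # sketch does not guard against.
    return d * np.mean(np.log(nu / rho)) + np.log(m / (n - 1))

# Example: two 1-D Gaussians with different means; the estimate should approach
# the closed-form KL divergence (here 0.5) as the sample sizes grow.
rng = np.random.default_rng(0)
x = rng.normal(0.0, 1.0, size=(5000, 1))
y = rng.normal(1.0, 1.0, size=(5000, 1))
print(knn_kl_divergence(x, y))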