2014
Taborda, Camilo G; Perez-Cruz, Fernando; Guo, Dongning
New Information-Estimation Results for Poisson, Binomial and Negative Binomial Models (Proceedings Article)
In: 2014 IEEE International Symposium on Information Theory, pp. 2207–2211, IEEE, Honolulu, 2014, ISBN: 978-1-4799-5186-4.
@inproceedings{Taborda2014,
title = {New Information-Estimation Results for Poisson, Binomial and Negative Binomial Models},
author = {Camilo G Taborda and Fernando Perez-Cruz and Dongning Guo},
url = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=6875225},
doi = {10.1109/ISIT.2014.6875225},
isbn = {978-1-4799-5186-4},
year = {2014},
date = {2014-06-01},
booktitle = {2014 IEEE International Symposium on Information Theory},
pages = {2207--2211},
publisher = {IEEE},
address = {Honolulu},
abstract = {In recent years, a number of mathematical relationships have been established between information measures and estimation measures for various models, including Gaussian, Poisson and binomial models. In this paper, it is shown that the second derivative of the input-output mutual information with respect to the input scaling can be expressed as the expectation of a certain Bregman divergence pertaining to the conditional expectations of the input and the input power. This result is similar to that found for the Gaussian model where the Bregman divergence therein is the square distance. In addition, the Poisson, binomial and negative binomial models are shown to be similar in the small scaling regime in the sense that the derivative of the mutual information and the derivative of the relative entropy converge to the same value.},
keywords = {Bregman divergence, Estimation, estimation measures, Gaussian models, Gaussian processes, information measures, information theory, information-estimation results, negative binomial models, Poisson models, Stochastic processes},
pubstate = {published},
tppubtype = {inproceedings}
}
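For context, the Gaussian benchmark that the abstract compares against can be written out explicitly. The display below is a sketch in my own notation, not the paper's: it assumes an input X observed as Y_γ = √γ X + N with standard Gaussian noise N, and combines the classical I-MMSE relation of Guo, Shamai and Verdú (2005) with the known expression for its derivative.

\[
\frac{\mathrm{d}}{\mathrm{d}\gamma} I(X; Y_\gamma)
  = \frac{1}{2}\,\mathbb{E}\!\left[\big(X - \mathbb{E}[X \mid Y_\gamma]\big)^2\right],
\qquad
\frac{\mathrm{d}^2}{\mathrm{d}\gamma^2} I(X; Y_\gamma)
  = -\frac{1}{2}\,\mathbb{E}\!\left[\big(\mathbb{E}[X^2 \mid Y_\gamma] - \mathbb{E}[X \mid Y_\gamma]^2\big)^2\right].
\]

The second derivative is, up to the factor −1/2, the expected squared distance between the conditional expectation of the input power, E[X²|Y_γ], and the square of the conditional expectation of the input. That squared distance is the Bregman divergence generated by φ(x) = x², which is the sense in which the paper's Poisson, binomial and negative binomial results parallel the Gaussian case with model-specific generators.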
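The first-order identity is easy to check numerically. The sketch below is my own illustration of that check with an equiprobable binary input, not code from the paper; it estimates I(X; Y_γ) by Monte Carlo with common random numbers (so a finite difference in γ is stable) and compares dI/dγ against mmse(γ)/2.

import numpy as np

rng = np.random.default_rng(0)
n = 500_000
x = rng.choice([-1.0, 1.0], size=n)   # equiprobable binary (BPSK) input
z = rng.standard_normal(n)            # common noise, reused across gammas

def mutual_info(gamma):
    """Monte Carlo estimate of I(X; sqrt(gamma)*X + N) in nats."""
    s = np.sqrt(gamma)
    y = s * x + z
    lp_pos = -0.5 * (y - s) ** 2      # log p(y | x=+1), shared constant dropped
    lp_neg = -0.5 * (y + s) ** 2      # log p(y | x=-1), same constant dropped
    lp_x = np.where(x > 0, lp_pos, lp_neg)
    lp_y = np.logaddexp(lp_pos, lp_neg) - np.log(2.0)  # log output density
    return np.mean(lp_x - lp_y)       # the dropped constants cancel here

def mmse(gamma):
    """E[(X - E[X|Y])^2]; for a BPSK input the posterior mean is tanh(sqrt(gamma)*y)."""
    s = np.sqrt(gamma)
    y = s * x + z
    return np.mean((x - np.tanh(s * y)) ** 2)

g, h = 1.0, 1e-4
print("dI/dgamma ~", (mutual_info(g + h) - mutual_info(g - h)) / (2 * h))
print("mmse/2    ~", mmse(g) / 2)

Both printed values should agree to Monte Carlo accuracy, illustrating the first identity in the display above; the paper's contribution concerns the analogous second-order and small-scaling statements for the discrete models.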