### 2012

Taborda, Camilo G.; Pérez-Cruz, Fernando

Derivative of the Relative Entropy over the Poisson and Binomial Channel (Inproceedings)

In: 2012 IEEE Information Theory Workshop, pp. 386–390, IEEE, Lausanne, 2012, ISBN: 978-1-4673-0223-4.

Abstract | Links | BibTeX | Tags: binomial channel, binomial distribution, Channel estimation, conditional distribution, Entropy, Estimation, function expectation, Mutual information, mutual information concept, Poisson channel, Poisson distribution, Random variables, relative entropy derivative, similar expression

@inproceedings{Taborda2012,
  title     = {Derivative of the Relative Entropy over the {Poisson} and {Binomial} Channel},
  author    = {Taborda, Camilo G. and P{\'e}rez-Cruz, Fernando},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6404699},
  isbn      = {978-1-4673-0223-4},
  year      = {2012},
  booktitle = {2012 IEEE Information Theory Workshop},
  pages     = {386--390},
  publisher = {IEEE},
  address   = {Lausanne},
  abstract  = {In this paper it is found that, regardless of the statistics of the input, the derivative of the relative entropy over the Binomial channel can be seen as the expectation of a function that has as argument the mean of the conditional distribution that models the channel. Based on this relationship we formulate a similar expression for the mutual information concept. In addition to this, using the connection between the Binomial and Poisson distribution we develop similar results for the Poisson channel. Novelty of the results presented here lies on the fact that, expressions obtained can be applied to a wide range of scenarios.},
  keywords  = {binomial channel, binomial distribution, Channel estimation, conditional distribution, Entropy, Estimation, function expectation, Mutual information, mutual information concept, Poisson channel, Poisson distribution, Random variables, relative entropy derivative, similar expression},
  pubstate  = {published},
  tppubtype = {inproceedings},
  internal-note = {address holds the conference venue (Lausanne), not the publisher's city; date field dropped: 2012-01-01 was an export placeholder, year retained},
}

Taborda, Camilo G.; Pérez-Cruz, Fernando

Mutual Information and Relative Entropy over the Binomial and Negative Binomial Channels (Inproceedings)

In: 2012 IEEE International Symposium on Information Theory Proceedings, pp. 696–700, IEEE, Cambridge, MA, 2012, ISSN: 2157-8095.

Abstract | Links | BibTeX | Tags: Channel estimation, conditional mean estimation, Entropy, Estimation, estimation theoretical quantity, estimation theory, Gaussian channel, Gaussian channels, information theory concept, loss function, mean square error methods, Mutual information, negative binomial channel, Poisson channel, Random variables, relative entropy

@inproceedings{Taborda2012a,
  title     = {Mutual Information and Relative Entropy over the {Binomial} and {Negative Binomial} Channels},
  author    = {Taborda, Camilo G. and P{\'e}rez-Cruz, Fernando},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6284304},
  issn      = {2157-8095},
  year      = {2012},
  booktitle = {2012 IEEE International Symposium on Information Theory Proceedings},
  pages     = {696--700},
  publisher = {IEEE},
  address   = {Cambridge, MA},
  abstract  = {We study the relation of the mutual information and relative entropy over the Binomial and Negative Binomial channels with estimation theoretical quantities, in which we extend already known results for Gaussian and Poisson channels. We establish general expressions for these information theory concepts with a direct connection with estimation theory through the conditional mean estimation and a particular loss function.},
  keywords  = {Channel estimation, conditional mean estimation, Entropy, Estimation, estimation theoretical quantity, estimation theory, Gaussian channel, Gaussian channels, information theory concept, loss function, mean square error methods, Mutual information, negative binomial channel, Poisson channel, Random variables, relative entropy},
  pubstate  = {published},
  tppubtype = {inproceedings},
  internal-note = {address holds the conference venue (Cambridge, MA), not the publisher's city; date field dropped: 2012-01-01 was an export placeholder, year retained},
}