2015
Elvira, Victor; Martino, Luca; Luengo, David; Bugallo, Monica F
Efficient Multiple Importance Sampling Estimators Journal Article
In: IEEE Signal Processing Letters, vol. 22, no. 10, pp. 1757–1761, 2015, ISSN: 1070-9908.
@article{Elvira2015bb,
title = {Efficient Multiple Importance Sampling Estimators},
author = {Victor Elvira and Luca Martino and David Luengo and Monica F Bugallo},
url = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=7105865},
doi = {10.1109/LSP.2015.2432078},
issn = {1070-9908},
year = {2015},
date = {2015-10-01},
journal = {IEEE Signal Processing Letters},
volume = {22},
number = {10},
pages = {1757--1761},
publisher = {IEEE},
abstract = {Multiple importance sampling (MIS) methods use a set of proposal distributions from which samples are drawn. Each sample is then assigned an importance weight that can be obtained according to different strategies. This work is motivated by the trade-off between variance reduction and computational complexity of the different approaches (classical vs. deterministic mixture) available for the weight calculation. A new method that achieves an efficient compromise between both factors is introduced in this letter. It is based on forming a partition of the set of proposal distributions and computing the weights accordingly. Computer simulations show the excellent performance of the associated partial deterministic mixture MIS estimator.},
keywords = {Adaptive importance sampling, classical mixture approach, computational complexity, Computational efficiency, Computer Simulation, deterministic mixture, estimation theory, Journal, Monte Carlo methods, multiple importance sampling, multiple importance sampling estimator, partial deterministic mixture MIS estimator, Proposals, signal sampling, Sociology, Standards, variance reduction, weight calculation},
pubstate = {published},
tppubtype = {article}
}
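The three weighting schemes compared in this letter (the classical MIS weight, the full deterministic-mixture weight, and the proposed partial deterministic-mixture weight built on a partition of the proposals) can be illustrated with a short sketch. The target density, Gaussian proposals, and block size below are illustrative assumptions, not values from the paper; this is a minimal reading of the weight formulas, not the authors' implementation.

import numpy as np

def gaussian_pdf(x, mu, sigma):
    # Univariate Gaussian density, vectorized over x and mu.
    return np.exp(-0.5 * ((x - mu) / sigma) ** 2) / (sigma * np.sqrt(2 * np.pi))

rng = np.random.default_rng(0)
target = lambda x: gaussian_pdf(x, 2.0, 1.5)      # illustrative target pi(x)
mus = np.linspace(-3.0, 3.0, 8)                    # N = 8 Gaussian proposals q_n
sigma = 1.0
xs = rng.normal(mus, sigma)                        # one draw x_n per proposal q_n

# (a) classical MIS: w_n = pi(x_n) / q_n(x_n)
w_classical = target(xs) / gaussian_pdf(xs, mus, sigma)

# (b) full deterministic mixture: w_n = pi(x_n) / ( (1/N) * sum_j q_j(x_n) )
mix_all = np.mean(gaussian_pdf(xs[:, None], mus[None, :], sigma), axis=1)
w_dm = target(xs) / mix_all

# (c) partial deterministic mixture: partition the proposals into blocks
# (block size 4 here, an arbitrary choice) and use only the mixture of the
# proposals in x_n's own block in the denominator.
block = 4
w_pdm = np.empty_like(w_classical)
for start in range(0, len(mus), block):
    idx = slice(start, start + block)
    mix_blk = np.mean(gaussian_pdf(xs[idx, None], mus[None, idx], sigma), axis=1)
    w_pdm[idx] = target(xs[idx]) / mix_blk

# Self-normalized estimates of E_pi[X] under each weighting scheme.
for name, w in [("classical", w_classical), ("full DM", w_dm), ("partial DM", w_pdm)]:
    print(name, np.sum(w * xs) / np.sum(w))

The partial-mixture denominator only evaluates the proposals inside one block, which is where the compromise between the variance reduction of the full mixture and the cost of the classical weights comes from.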
2012
Taborda, Camilo G; Perez-Cruz, Fernando
Mutual Information and Relative Entropy over the Binomial and Negative Binomial Channels Proceedings Article
In: 2012 IEEE International Symposium on Information Theory Proceedings, pp. 696–700, IEEE, Cambridge, MA, 2012, ISSN: 2157-8095.
@inproceedings{Taborda2012a,
title = {Mutual Information and Relative Entropy over the Binomial and Negative Binomial Channels},
author = {Camilo G Taborda and Fernando Perez-Cruz},
url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6284304},
issn = {2157-8095},
year = {2012},
date = {2012-01-01},
booktitle = {2012 IEEE International Symposium on Information Theory Proceedings},
pages = {696--700},
publisher = {IEEE},
address = {Cambridge, MA},
abstract = {We study the relation of the mutual information and the relative entropy over the Binomial and Negative Binomial channels to estimation-theoretic quantities, extending already known results for the Gaussian and Poisson channels. We establish general expressions for these information theory concepts with a direct connection to estimation theory through the conditional mean estimate and a particular loss function.},
keywords = {Channel estimation, conditional mean estimation, Entropy, Estimation, estimation theoretical quantity, estimation theory, Gaussian channel, Gaussian channels, information theory concept, loss function, mean square error methods, Mutual information, negative binomial channel, Poisson channel, Random variables, relative entropy},
pubstate = {published},
tppubtype = {inproceedings}
}
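For background, the known Gaussian-channel result that this paper extends is commonly stated as the I-MMSE identity of Guo, Shamai and Verdú; a compact statement, included here only as context and not taken from this paper:

% Scalar Gaussian channel Y = sqrt(snr) X + N, with N ~ N(0,1) independent of X:
\frac{\mathrm{d}}{\mathrm{d}\,\mathrm{snr}}\, I(X;Y) \;=\; \frac{1}{2}\,\mathbb{E}\!\left[\bigl(X - \mathbb{E}[X \mid Y]\bigr)^{2}\right]

The paper above derives analogous expressions for the Binomial and Negative Binomial channels, where the squared-error loss of the Gaussian case is replaced by a channel-specific loss function applied to the conditional mean estimate.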