2018
Míguez, Joaquín; Mariño, Inés P.; Vázquez, Manuel A
Analysis of a nonlinear importance sampling scheme for Bayesian parameter estimation in state-space models Artículo de revista
En: Signal Processing, vol. 142, pp. 281-291, 2018, ISSN: 0165-1684.
Resumen | Enlaces | BibTeX | Etiquetas: Adaptive importance sampling, Bayesian inference, Importance sampling, Parameter estimation, population Monte Carlo, State space models
@article{MIGUEZ2018281,
  title    = {Analysis of a nonlinear importance sampling scheme for {Bayesian} parameter estimation in state-space models},
  author   = {M{\'i}guez, Joaqu{\'i}n and Mari{\~n}o, In{\'e}s P. and V{\'a}zquez, Manuel A.},
  url      = {https://www.sciencedirect.com/science/article/pii/S0165168417302761},
  doi      = {10.1016/j.sigpro.2017.07.030},
  issn     = {0165-1684},
  year     = {2018},
  date     = {2018-01-01},
  urldate  = {2018-01-01},
  journal  = {Signal Processing},
  volume   = {142},
  pages    = {281--291},
  abstract = {The Bayesian estimation of the unknown parameters of state-space (dynamical) systems has received considerable attention over the past decade, with a handful of powerful algorithms being introduced. In this paper we tackle the theoretical analysis of the recently proposed nonlinear population Monte Carlo (NPMC). This is an iterative importance sampling scheme whose key features, compared to conventional importance samplers, are (i) the approximate computation of the importance weights (IWs) assigned to the Monte Carlo samples and (ii) the nonlinear transformation of these IWs in order to prevent the degeneracy problem that flaws the performance of conventional importance samplers. The contribution of the present paper is a rigorous proof of convergence of the nonlinear IS (NIS) scheme as the number of Monte Carlo samples, $M$, increases. Our analysis reveals that the NIS approximation errors converge to 0 almost surely and with the optimal Monte Carlo rate of $M^{-1/2}$. Moreover, we prove that this is achieved even when the mean estimation error of the IWs remains constant, a property that has been termed exact approximation in the Markov chain Monte Carlo literature. We illustrate these theoretical results by means of a computer simulation example involving the estimation of the parameters of a state-space model typically used for target tracking.},
  keywords = {Adaptive importance sampling, Bayesian inference, Importance sampling, Parameter estimation, population Monte Carlo, State space models},
  pubstate = {published},
  tppubtype = {article}
}
Míguez, Joaquín; Mariño, Inés P.; Vázquez, Manuel A
Analysis of a nonlinear importance sampling scheme for Bayesian parameter estimation in state-space models Artículo de revista
En: Signal Processing, vol. 142, pp. 281-291, 2018, ISSN: 0165-1684.
Resumen | Enlaces | BibTeX | Etiquetas: Adaptive importance sampling, Bayesian inference, Importance sampling, Parameter estimation, population Monte Carlo, State space models
@article{MIGUEZ2018281b,
  internal-note = {NOTE(review): exact duplicate of entry MIGUEZ2018281 (same DOI, pages, authors) under a different key -- consider removing one of the two.},
  title    = {Analysis of a nonlinear importance sampling scheme for {Bayesian} parameter estimation in state-space models},
  author   = {M{\'i}guez, Joaqu{\'i}n and Mari{\~n}o, In{\'e}s P. and V{\'a}zquez, Manuel A.},
  url      = {https://www.sciencedirect.com/science/article/pii/S0165168417302761},
  doi      = {10.1016/j.sigpro.2017.07.030},
  issn     = {0165-1684},
  year     = {2018},
  date     = {2018-01-01},
  urldate  = {2018-01-01},
  journal  = {Signal Processing},
  volume   = {142},
  pages    = {281--291},
  abstract = {The Bayesian estimation of the unknown parameters of state-space (dynamical) systems has received considerable attention over the past decade, with a handful of powerful algorithms being introduced. In this paper we tackle the theoretical analysis of the recently proposed nonlinear population Monte Carlo (NPMC). This is an iterative importance sampling scheme whose key features, compared to conventional importance samplers, are (i) the approximate computation of the importance weights (IWs) assigned to the Monte Carlo samples and (ii) the nonlinear transformation of these IWs in order to prevent the degeneracy problem that flaws the performance of conventional importance samplers. The contribution of the present paper is a rigorous proof of convergence of the nonlinear IS (NIS) scheme as the number of Monte Carlo samples, $M$, increases. Our analysis reveals that the NIS approximation errors converge to 0 almost surely and with the optimal Monte Carlo rate of $M^{-1/2}$. Moreover, we prove that this is achieved even when the mean estimation error of the IWs remains constant, a property that has been termed exact approximation in the Markov chain Monte Carlo literature. We illustrate these theoretical results by means of a computer simulation example involving the estimation of the parameters of a state-space model typically used for target tracking.},
  keywords = {Adaptive importance sampling, Bayesian inference, Importance sampling, Parameter estimation, population Monte Carlo, State space models},
  pubstate = {published},
  tppubtype = {article}
}
2017
Elvira, Victor; Martino, Luca; Luengo, David; Bugallo, Monica F
Improving Population Monte Carlo: Alternative Weighting and Resampling Schemes Artículo de revista
En: Signal Processing, vol. 131, pp. 77–91, 2017, ISSN: 0165-1684.
Resumen | Enlaces | BibTeX | Etiquetas: Adaptive importance sampling, Journal, population Monte Carlo, Proposal distribution, Resampling
@article{Elvira2017,
  title    = {Improving {Population Monte Carlo}: Alternative Weighting and Resampling Schemes},
  author   = {Elvira, Victor and Martino, Luca and Luengo, David and Bugallo, Monica F.},
  url      = {http://www.sciencedirect.com/science/article/pii/S0165168416301633},
  doi      = {10.1016/j.sigpro.2016.07.012},
  issn     = {0165-1684},
  year     = {2017},
  date     = {2017-02-01},
  journal  = {Signal Processing},
  volume   = {131},
  pages    = {77--91},
  abstract = {Population Monte Carlo (PMC) sampling methods are powerful tools for approximating distributions of static unknowns given a set of observations. These methods are iterative in nature: at each step they generate samples from a proposal distribution and assign them weights according to the importance sampling principle. Critical issues in applying PMC methods are the choice of the generating functions for the samples and the avoidance of the sample degeneracy. In this paper, we propose three new schemes that considerably improve the performance of the original PMC formulation by allowing for better exploration of the space of unknowns and by selecting more adequately the surviving samples. A theoretical analysis is performed, proving the superiority of the novel schemes in terms of variance of the associated estimators and preservation of the sample diversity. Furthermore, we show that they outperform other state of the art algorithms (both in terms of mean square error and robustness w.r.t. initialization) through extensive numerical simulations.},
  keywords = {Adaptive importance sampling, Journal, population Monte Carlo, Proposal distribution, Resampling},
  pubstate = {published},
  tppubtype = {article}
}
2015
Elvira, Victor; Martino, Luca; Luengo, David; Bugallo, Monica F
Efficient Multiple Importance Sampling Estimators Artículo de revista
En: IEEE Signal Processing Letters, vol. 22, no 10, pp. 1757–1761, 2015, ISSN: 1070-9908.
Resumen | Enlaces | BibTeX | Etiquetas: Adaptive importance sampling, classical mixture approach, computational complexity, Computational efficiency, Computer Simulation, deterministic mixture, estimation theory, Journal, Monte Carlo methods, multiple importance sampling, multiple importance sampling estimator, partial deterministic mixture MIS estimator, Proposals, signal sampling, Sociology, Standards, variance reduction, weight calculation
@article{Elvira2015bb,
  author    = {Elvira, Victor and Martino, Luca and Luengo, David and Bugallo, Monica F.},
  title     = {Efficient Multiple Importance Sampling Estimators},
  journal   = {IEEE Signal Processing Letters},
  volume    = {22},
  number    = {10},
  pages     = {1757--1761},
  publisher = {IEEE},
  year      = {2015},
  date      = {2015-10-01},
  issn      = {1070-9908},
  doi       = {10.1109/LSP.2015.2432078},
  url       = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=7105865},
  abstract  = {Multiple importance sampling (MIS) methods use a set of proposal distributions from which samples are drawn. Each sample is then assigned an importance weight that can be obtained according to different strategies. This work is motivated by the trade-off between variance reduction and computational complexity of the different approaches (classical vs. deterministic mixture) available for the weight calculation. A new method that achieves an efficient compromise between both factors is introduced in this letter. It is based on forming a partition of the set of proposal distributions and computing the weights accordingly. Computer simulations show the excellent performance of the associated partial deterministic mixture MIS estimator.},
  keywords  = {Adaptive importance sampling, classical mixture approach, computational complexity, Computational efficiency, Computer Simulation, deterministic mixture, estimation theory, Journal, Monte Carlo methods, multiple importance sampling, multiple importance sampling estimator, partial deterministic mixture MIS estimator, Proposals, signal sampling, Sociology, Standards, variance reduction, weight calculation},
  pubstate  = {published},
  tppubtype = {article}
}
Martino, Luca; Elvira, Victor; Luengo, David; Corander, Jukka
Parallel interacting Markov adaptive importance sampling Proceedings Article
En: 2015 23rd European Signal Processing Conference (EUSIPCO), pp. 499–503, IEEE, Nice, 2015, ISBN: 978-0-9928-6263-3.
Resumen | Enlaces | BibTeX | Etiquetas: Adaptive importance sampling, Bayesian inference, MCMC methods, Monte Carlo methods, Parallel Chains, Probability density function, Proposals, Signal processing, Signal processing algorithms, Sociology
@inproceedings{Martino2015bb,
  title     = {Parallel interacting {Markov} adaptive importance sampling},
  author    = {Martino, Luca and Elvira, Victor and Luengo, David and Corander, Jukka},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7362433},
  doi       = {10.1109/EUSIPCO.2015.7362433},
  isbn      = {978-0-9928-6263-3},
  year      = {2015},
  date      = {2015-08-01},
  booktitle = {2015 23rd European Signal Processing Conference (EUSIPCO)},
  pages     = {499--503},
  publisher = {IEEE},
  address   = {Nice},
  abstract  = {Monte Carlo (MC) methods are widely used for statistical inference in signal processing applications. A well-known class of MC methods is importance sampling (IS) and its adaptive extensions. In this work, we introduce an iterated importance sampler using a population of proposal densities, which are adapted according to an MCMC technique over the population of location parameters. The novel algorithm provides a global estimation of the variables of interest iteratively, using all the samples weighted according to the deterministic mixture scheme. Numerical results, on a multi-modal example and a localization problem in wireless sensor networks, show the advantages of the proposed schemes.},
  keywords  = {Adaptive importance sampling, Bayesian inference, MCMC methods, Monte Carlo methods, Parallel Chains, Probability density function, Proposals, Signal processing, Signal processing algorithms, Sociology},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Martino, Luca; Elvira, Victor; Luengo, David; Corander, Jukka
An Adaptive Population Importance Sampler: Learning From Uncertainty Artículo de revista
En: IEEE Transactions on Signal Processing, vol. 63, no 16, pp. 4422–4437, 2015, ISSN: 1053-587X.
Resumen | Enlaces | BibTeX | Etiquetas: Adaptive importance sampling, adaptive multiple IS, adaptive population importance sampler, AMIS, APIS, Estimation, Importance sampling, IS estimators, iterative estimation, iterative methods, Journal, MC methods, Monte Carlo (MC) methods, Monte Carlo methods, population Monte Carlo, Proposals, Signal processing algorithms, simple temporal adaptation, Sociology, Standards, Wireless sensor network, Wireless Sensor Networks
@article{Martino2015bbb,
  author    = {Martino, Luca and Elvira, Victor and Luengo, David and Corander, Jukka},
  title     = {An Adaptive Population Importance Sampler: Learning From Uncertainty},
  journal   = {IEEE Transactions on Signal Processing},
  volume    = {63},
  number    = {16},
  pages     = {4422--4437},
  publisher = {IEEE},
  year      = {2015},
  date      = {2015-08-01},
  issn      = {1053-587X},
  doi       = {10.1109/TSP.2015.2440215},
  url       = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=7117437},
  abstract  = {Monte Carlo (MC) methods are well-known computational techniques, widely used in different fields such as signal processing, communications and machine learning. An important class of MC methods is composed of importance sampling (IS) and its adaptive extensions, such as population Monte Carlo (PMC) and adaptive multiple IS (AMIS). In this paper, we introduce a novel adaptive and iterated importance sampler using a population of proposal densities. The proposed algorithm, named adaptive population importance sampling (APIS), provides a global estimation of the variables of interest iteratively, making use of all the samples previously generated. APIS combines a sophisticated scheme to build the IS estimators (based on the deterministic mixture approach) with a simple temporal adaptation (based on epochs). In this way, APIS is able to keep all the advantages of both AMIS and PMC, while minimizing their drawbacks. Furthermore, APIS is easily parallelizable. The cloud of proposals is adapted in such a way that local features of the target density can be better taken into account compared to single global adaptation procedures. The result is a fast, simple, robust, and high-performance algorithm applicable to a wide range of problems. Numerical results show the advantages of the proposed sampling scheme in four synthetic examples and a localization problem in a wireless sensor network.},
  keywords  = {Adaptive importance sampling, adaptive multiple IS, adaptive population importance sampler, AMIS, APIS, Estimation, Importance sampling, IS estimators, iterative estimation, iterative methods, Journal, MC methods, Monte Carlo (MC) methods, Monte Carlo methods, population Monte Carlo, Proposals, Signal processing algorithms, simple temporal adaptation, Sociology, Standards, Wireless sensor network, Wireless Sensor Networks},
  pubstate  = {published},
  tppubtype = {article}
}
Elvira, Victor; Martino, Luca; Luengo, David; Corander, Jukka
A Gradient Adaptive Population Importance Sampler Proceedings Article
En: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 4075–4079, IEEE, Brisbane, 2015, ISBN: 978-1-4673-6997-8.
Resumen | Enlaces | BibTeX | Etiquetas: adaptive extensions, adaptive importance sampler, Adaptive importance sampling, gradient adaptive population, gradient matrix, Hamiltonian Monte Carlo, Hessian matrices, Hessian matrix, learning (artificial intelligence), Machine learning, MC methods, Monte Carlo, Monte Carlo methods, population Monte Carlo (PMC), proposal densities, Signal processing, Sociology, statistics, target distribution
@inproceedings{Elvira2015a,
  title     = {A Gradient Adaptive Population Importance Sampler},
  author    = {Elvira, Victor and Martino, Luca and Luengo, David and Corander, Jukka},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7178737},
  doi       = {10.1109/ICASSP.2015.7178737},
  isbn      = {978-1-4673-6997-8},
  year      = {2015},
  date      = {2015-04-01},
  booktitle = {2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  pages     = {4075--4079},
  publisher = {IEEE},
  address   = {Brisbane},
  abstract  = {Monte Carlo (MC) methods are widely used in signal processing and machine learning. A well-known class of MC methods is composed of importance sampling and its adaptive extensions (e.g., population Monte Carlo). In this paper, we introduce an adaptive importance sampler using a population of proposal densities. The novel algorithm dynamically optimizes the cloud of proposals, adapting them using information about the gradient and Hessian matrix of the target distribution. Moreover, a new kind of interaction in the adaptation of the proposal densities is introduced, establishing a trade-off between attaining a good performance in terms of mean square error and robustness to initialization.},
  keywords  = {adaptive extensions, adaptive importance sampler, Adaptive importance sampling, gradient adaptive population, gradient matrix, Hamiltonian Monte Carlo, Hessian matrices, Hessian matrix, learning (artificial intelligence), Machine learning, MC methods, Monte Carlo, Monte Carlo methods, population Monte Carlo (PMC), proposal densities, Signal processing, Sociology, statistics, target distribution},
  pubstate  = {published},
  tppubtype = {inproceedings}
}