2021
Pérez-Vieites, Sara; Míguez, Joaquín
Nested Gaussian filters for recursive Bayesian inference and nonlinear tracking in state space models Artículo de revista
En: Signal Processing, vol. 189, pp. 108295, 2021, ISSN: 0165-1684.
Resumen | Enlaces | BibTeX | Etiquetas: Bayesian inference, Filtering, Kalman, Monte Carlo, Parameter estimation
@article{PEREZVIEITES2021108295,
  title     = {Nested {Gaussian} filters for recursive {Bayesian} inference and nonlinear tracking in state space models},
  author    = {P{\'e}rez-Vieites, Sara and M{\'\i}guez, Joaqu{\'\i}n},
  url       = {https://www.sciencedirect.com/science/article/pii/S0165168421003327},
  doi       = {10.1016/j.sigpro.2021.108295},
  issn      = {0165-1684},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  journal   = {Signal Processing},
  volume    = {189},
  pages     = {108295},
  abstract  = {We introduce a new sequential methodology to calibrate the fixed parameters and track the stochastic dynamical variables of a state-space system. The proposed method is based on the nested hybrid filtering (NHF) framework of [1], that combines two layers of filters, one inside the other, to compute the joint posterior probability distribution of the static parameters and the state variables. In particular, we explore the use of deterministic sampling techniques for Gaussian approximation in the first layer of the algorithm, instead of the Monte Carlo methods employed in the original procedure. The resulting scheme reduces the computational cost and so makes the algorithms potentially better-suited for high-dimensional state and parameter spaces. We describe a specific instance of the new method and then study its performance and efficiency of the resulting algorithms for a stochastic Lorenz 63 model and for a stochastic volatility model with real data.},
  keywords  = {Bayesian inference, Filtering, Kalman, Monte Carlo, Parameter estimation},
  pubstate  = {published},
  tppubtype = {article}
}
2018
Crisan, Dan; Míguez, Joaquín
Nested particle filters for online parameter estimation in discrete-time state-space Markov models Artículo de revista
En: Bernoulli, vol. 24, no. 4A, pp. 3039–3086, 2018.
Enlaces | BibTeX | Etiquetas: error bounds, model inference, Monte Carlo, Parameter estimation, Particle filtering, recursive algorithms, State space models
@article{10.3150/17-BEJ954,
  title     = {Nested particle filters for online parameter estimation in discrete-time state-space {Markov} models},
  author    = {Crisan, Dan and M{\'\i}guez, Joaqu{\'\i}n},
  url       = {https://doi.org/10.3150/17-BEJ954},
  doi       = {10.3150/17-BEJ954},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2018-01-01},
  journal   = {Bernoulli},
  volume    = {24},
  number    = {4A},
  pages     = {3039--3086},
  publisher = {Bernoulli Society for Mathematical Statistics and Probability},
  keywords  = {error bounds, model inference, Monte Carlo, Parameter estimation, Particle filtering, recursive algorithms, State space models},
  pubstate  = {published},
  tppubtype = {article}
}
2015
Elvira, Victor; Martino, Luca; Luengo, David; Corander, Jukka
A Gradient Adaptive Population Importance Sampler Proceedings Article
En: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 4075–4079, IEEE, Brisbane, 2015, ISBN: 978-1-4673-6997-8.
Resumen | Enlaces | BibTeX | Etiquetas: adaptive extensions, adaptive importance sampler, Adaptive importance sampling, gradient adaptive population, gradient matrix, Hamiltonian Monte Carlo, Hessian matrices, Hessian matrix, learning (artificial intelligence), Machine learning, MC methods, Monte Carlo, Monte Carlo methods, population Monte Carlo (PMC), proposal densities, Signal processing, Sociology, statistics, target distribution
@inproceedings{Elvira2015a,
  title     = {A Gradient Adaptive Population Importance Sampler},
  author    = {Elvira, Victor and Martino, Luca and Luengo, David and Corander, Jukka},
  url       = {http://www.tsc.uc3m.es/~velvira/papers/ICASSP2015_elvira.pdf},
  doi       = {10.1109/ICASSP.2015.7178737},
  isbn      = {978-1-4673-6997-8},
  year      = {2015},
  date      = {2015-04-01},
  booktitle = {2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  pages     = {4075--4079},
  publisher = {IEEE},
  address   = {Brisbane},
  abstract  = {Monte Carlo (MC) methods are widely used in signal processing and machine learning. A well-known class of MC methods is composed of importance sampling and its adaptive extensions (e.g., population Monte Carlo). In this paper, we introduce an adaptive importance sampler using a population of proposal densities. The novel algorithm dynamically optimizes the cloud of proposals, adapting them using information about the gradient and Hessian matrix of the target distribution. Moreover, a new kind of interaction in the adaptation of the proposal densities is introduced, establishing a trade-off between attaining a good performance in terms of mean square error and robustness to initialization.},
  keywords  = {adaptive extensions, adaptive importance sampler, Adaptive importance sampling, gradient adaptive population, gradient matrix, Hamiltonian Monte Carlo, Hessian matrices, Hessian matrix, learning (artificial intelligence), Machine learning, MC methods, Monte Carlo, Monte Carlo methods, population Monte Carlo (PMC), proposal densities, Signal processing, Sociology, statistics, target distribution},
  pubstate  = {published},
  tppubtype = {inproceedings}
}