### 2015

Luengo, David; Martino, Luca; Elvira, Victor; Bugallo, Monica F

Bias correction for distributed Bayesian estimators Inproceedings

In: 2015 IEEE 6th International Workshop on Computational Advances in Multi-Sensor Adaptive Processing (CAMSAP), pp. 253–256, IEEE, Cancun, 2015, ISBN: 978-1-4799-1963-5.

Abstract | Links | BibTeX | Tags: Bayes methods, Big data, Distributed databases, Estimation, Probability density function, Wireless Sensor Networks

@inproceedings{Luengo2015a,
  title     = {Bias correction for distributed {Bayesian} estimators},
  author    = {Luengo, David and Martino, Luca and Elvira, Victor and Bugallo, Monica F.},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7383784},
  doi       = {10.1109/CAMSAP.2015.7383784},
  isbn      = {978-1-4799-1963-5},
  year      = {2015},
  date      = {2015-12-01},
  booktitle = {2015 IEEE 6th International Workshop on Computational Advances in Multi-Sensor Adaptive Processing (CAMSAP)},
  pages     = {253--256},
  publisher = {IEEE},
  address   = {Cancun},
  abstract  = {Dealing with the whole dataset in big data estimation problems is usually unfeasible. A common solution then consists of dividing the data into several smaller sets, performing distributed Bayesian estimation and combining these partial estimates to obtain a global estimate. A major problem of this approach is the presence of a non-negligible bias in the partial estimators, due to the mismatch between the unknown true prior and the prior assumed in the estimation. A simple method to mitigate the effect of this bias is proposed in this paper. Essentially, the approach is based on using a reference data set to obtain a rough estimation of the parameter of interest, i.e., a reference parameter. This information is then communicated to the partial filters that handle the smaller data sets, which can thus use a refined prior centered around this parameter. Simulation results confirm the good performance of this scheme.},
  keywords  = {Bayes methods, Big data, Distributed databases, Estimation, Probability density function, Wireless Sensor Networks},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Martino, Luca; Elvira, Victor; Luengo, David; Corander, Jukka

Parallel interacting Markov adaptive importance sampling Inproceedings

In: 2015 23rd European Signal Processing Conference (EUSIPCO), pp. 499–503, IEEE, Nice, 2015, ISBN: 978-0-9928-6263-3.

Abstract | Links | BibTeX | Tags: Adaptive importance sampling, Bayesian inference, MCMC methods, Monte Carlo methods, Parallel Chains, Probability density function, Proposals, Signal processing, Signal processing algorithms, Sociology

@inproceedings{Martino2015bb,
  title     = {Parallel interacting {Markov} adaptive importance sampling},
  author    = {Martino, Luca and Elvira, Victor and Luengo, David and Corander, Jukka},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7362433 http://www.eurasip.org/Proceedings/Eusipco/Eusipco2015/papers/1570111267.pdf},
  doi       = {10.1109/EUSIPCO.2015.7362433},
  isbn      = {978-0-9928-6263-3},
  year      = {2015},
  date      = {2015-08-01},
  booktitle = {2015 23rd European Signal Processing Conference (EUSIPCO)},
  pages     = {499--503},
  publisher = {IEEE},
  address   = {Nice},
  abstract  = {Monte Carlo (MC) methods are widely used for statistical inference in signal processing applications. A well-known class of MC methods is importance sampling (IS) and its adaptive extensions. In this work, we introduce an iterated importance sampler using a population of proposal densities, which are adapted according to an MCMC technique over the population of location parameters. The novel algorithm provides a global estimation of the variables of interest iteratively, using all the samples weighted according to the deterministic mixture scheme. Numerical results, on a multi-modal example and a localization problem in wireless sensor networks, show the advantages of the proposed schemes.},
  keywords  = {Adaptive importance sampling, Bayesian inference, MCMC methods, Monte Carlo methods, Parallel Chains, Probability density function, Proposals, Signal processing, Signal processing algorithms, Sociology},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Martino, Luca; Elvira, Victor; Luengo, David; Artés-Rodríguez, Antonio; Corander, Jukka

Smelly Parallel MCMC Chains Inproceedings

In: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 4070–4074, IEEE, Brisbane, 2015, ISBN: 978-1-4673-6997-8.

Abstract | Links | BibTeX | Tags: Bayesian inference, learning (artificial intelligence), Machine learning, Markov chain Monte Carlo, Markov chain Monte Carlo algorithms, Markov processes, MC methods, MCMC algorithms, MCMC scheme, mean square error, mean square error methods, Monte Carlo methods, optimisation, parallel and interacting chains, Probability density function, Proposals, robustness, Sampling methods, Signal processing, Signal processing algorithms, signal sampling, smelly parallel chains, smelly parallel MCMC chains, Stochastic optimization

@inproceedings{Martino2015a,
  title     = {Smelly Parallel {MCMC} Chains},
  author    = {Martino, Luca and Elvira, Victor and Luengo, David and Artés-Rodríguez, Antonio and Corander, Jukka},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7178736 http://www.tsc.uc3m.es/~velvira/papers/ICASSP2015_martino.pdf},
  doi       = {10.1109/ICASSP.2015.7178736},
  isbn      = {978-1-4673-6997-8},
  year      = {2015},
  date      = {2015-04-01},
  booktitle = {2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  pages     = {4070--4074},
  publisher = {IEEE},
  address   = {Brisbane},
  abstract  = {Monte Carlo (MC) methods are useful tools for Bayesian inference and stochastic optimization that have been widely applied in signal processing and machine learning. A well-known class of MC methods are Markov Chain Monte Carlo (MCMC) algorithms. In this work, we introduce a novel parallel interacting MCMC scheme, where the parallel chains share information, thus yielding a faster exploration of the state space. The interaction is carried out generating a dynamic repulsion among the “smelly” parallel chains that takes into account the entire population of current states. The ergodicity of the scheme and its relationship with other sampling methods are discussed. Numerical results show the advantages of the proposed approach in terms of mean square error, robustness w.r.t. to initial values and parameter choice.},
  keywords  = {Bayesian inference, learning (artificial intelligence), Machine learning, Markov chain Monte Carlo, Markov chain Monte Carlo algorithms, Markov processes, MC methods, MCMC algorithms, MCMC scheme, mean square error, mean square error methods, Monte Carlo methods, optimisation, parallel and interacting chains, Probability density function, Proposals, robustness, Sampling methods, Signal processing, Signal processing algorithms, signal sampling, smelly parallel chains, smelly parallel MCMC chains, Stochastic optimization},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2013

Koblents, Eugenia; Miguez, Joaquin

A Population Monte Carlo Scheme for Computational Inference in High Dimensional Spaces Inproceedings

In: 2013 IEEE International Conference on Acoustics, Speech and Signal Processing, pp. 6318–6322, IEEE, Vancouver, 2013, ISSN: 1520-6149.

Abstract | Links | BibTeX | Tags: Approximation methods, computational inference, degeneracy of importance weights, high dimensional spaces, Importance sampling, importance weights, iterative importance sampling, iterative methods, mixture-PMC, mixture-PMC algorithm, Monte Carlo methods, MPMC, nonlinear transformations, population Monte Carlo, population Monte Carlo scheme, Probability density function, probability distributions, Proposals, Sociology, Standards

@inproceedings{Koblents2013a,
  title     = {A Population {Monte Carlo} Scheme for Computational Inference in High Dimensional Spaces},
  author    = {Koblents, Eugenia and Miguez, Joaquin},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6638881},
  doi       = {10.1109/ICASSP.2013.6638881},
  issn      = {1520-6149},
  year      = {2013},
  date      = {2013-01-01},
  booktitle = {2013 IEEE International Conference on Acoustics, Speech and Signal Processing},
  pages     = {6318--6322},
  publisher = {IEEE},
  address   = {Vancouver},
  abstract  = {In this paper we address the Monte Carlo approximation of integrals with respect to probability distributions in high-dimensional spaces. In particular, we investigate the population Monte Carlo (PMC) scheme, which is based on an iterative importance sampling (IS) approach. Both IS and PMC suffer from the well known problem of degeneracy of the importance weights (IWs), which is closely related to the curse-of-dimensionality, and limits their applicability in large-scale practical problems. In this paper we investigate a novel PMC scheme that consists in performing nonlinear transformations of the IWs in order to smooth their variations and avoid degeneracy. We apply the modified IS scheme to the well-known mixture-PMC (MPMC) algorithm, which constructs the importance functions as mixtures of kernels. We present numerical results that show how the modified version of MPMC clearly outperforms the original scheme.},
  keywords  = {Approximation methods, computational inference, degeneracy of importance weights, high dimensional spaces, Importance sampling, importance weights, iterative importance sampling, iterative methods, mixture-PMC, mixture-PMC algorithm, Monte Carlo methods, MPMC, nonlinear transformations, population Monte Carlo, population Monte Carlo scheme, Probability density function, probability distributions, Proposals, Sociology, Standards},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2011

Balasingam, Balakumar; Bolic, Miodrag; Djuric, Petar M; Miguez, Joaquin

Efficient Distributed Resampling for Particle Filters Inproceedings

In: 2011 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 3772–3775, IEEE, Prague, 2011, ISSN: 1520-6149.

Abstract | Links | BibTeX | Tags: Approximation algorithms, Copper, Covariance matrix, distributed resampling, Markov processes, Probability density function, Sequential Monte-Carlo methods, Signal processing, Signal processing algorithms

@inproceedings{Balasingam2011,
  title     = {Efficient Distributed Resampling for Particle Filters},
  author    = {Balasingam, Balakumar and Bolic, Miodrag and Djuric, Petar M. and Miguez, Joaquin},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5947172},
  doi       = {10.1109/ICASSP.2011.5947172},
  issn      = {1520-6149},
  year      = {2011},
  date      = {2011-01-01},
  booktitle = {2011 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  pages     = {3772--3775},
  publisher = {IEEE},
  address   = {Prague},
  abstract  = {In particle filtering, resampling is the only step that cannot be fully parallelized. Recently, we have proposed algorithms for distributed resampling implemented on architectures with concurrent processing elements (PEs). The objective of distributed resampling is to reduce the communication among the PEs while not compromising the performance of the particle filter. An additional objective for implementation is to reduce the communication among the PEs. In this paper, we report an improved version of the distributed resampling algorithm that optimally selects the particles for communication between the PEs of the distributed scheme. Computer simulations are provided that demonstrate the improved performance of the proposed algorithm.},
  keywords  = {Approximation algorithms, Copper, Covariance matrix, distributed resampling, Markov processes, Probability density function, Sequential Monte-Carlo methods, Signal processing, Signal processing algorithms},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Plata-Chaves, Jorge; Lazaro, Marcelino; Artés-Rodríguez, Antonio

Optimal Neyman-Pearson Fusion in Two-Dimensional Sensor Networks with Serial Architecture and Dependent Observations Inproceedings

In: Information Fusion (FUSION), 2011 Proceedings of the 14th International Conference on, pp. 1–6, Chicago, 2011, ISBN: 978-1-4577-0267-9.

Abstract | Links | BibTeX | Tags: Bayesian methods, binary distributed detection problem, decision theory, dependent observations, Joints, local decision rule, Measurement uncertainty, Network topology, Neyman-Pearson criterion, optimal Neyman-Pearson fusion, optimum distributed detection, Parallel architectures, Performance evaluation, Probability density function, sensor dependent observations, sensor fusion, serial architecture, serial network topology, two-dimensional sensor networks, Wireless Sensor Networks

@inproceedings{Plata-Chaves2011bb,
  title     = {Optimal {Neyman-Pearson} Fusion in Two-Dimensional Sensor Networks with Serial Architecture and Dependent Observations},
  author    = {Plata-Chaves, Jorge and Lazaro, Marcelino and Artés-Rodríguez, Antonio},
  url       = {http://ieeexplore.ieee.org/xpl/articleDetails.jsp?arnumber=5977545},
  isbn      = {978-1-4577-0267-9},
  year      = {2011},
  date      = {2011-01-01},
  booktitle = {Information Fusion (FUSION), 2011 Proceedings of the 14th International Conference on},
  pages     = {1--6},
  address   = {Chicago},
  abstract  = {In this correspondence, we consider a sensor network with serial architecture. When solving a binary distributed detection problem where the sensor observations are dependent under each one of the two possible hypothesis, each fusion stage of the network applies a local decision rule. We assume that, based on the information available at each fusion stage, the decision rules provide a binary message regarding the presence or absence of an event of interest. Under this scenario and under a Neyman-Pearson formulation, we derive the optimal decision rules associated with each fusion stage. As it happens when the sensor observations are independent, we are able to show that, under the Neyman-Pearson criterion, the optimal fusion rules of a serial configuration with dependent observations also match optimal Neyman-Pearson tests.},
  keywords  = {Bayesian methods, binary distributed detection problem, decision theory, dependent observations, Joints, local decision rule, Measurement uncertainty, Network topology, Neyman-Pearson criterion, optimal Neyman-Pearson fusion, optimum distributed detection, Parallel architectures, Performance evaluation, Probability density function, sensor dependent observations, sensor fusion, serial architecture, serial network topology, two-dimensional sensor networks, Wireless Sensor Networks},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2009

Martino, Luca; Miguez, Joaquin

A Novel Rejection Sampling Scheme for Posterior Probability Distributions Inproceedings

In: 2009 IEEE International Conference on Acoustics, Speech and Signal Processing, pp. 2921–2924, IEEE, Taipei, 2009, ISSN: 1520-6149.

Abstract | Links | BibTeX | Tags: Additive noise, arbitrary target probability distributions, Bayes methods, Bayesian methods, Monte Carlo integration, Monte Carlo methods, Monte Carlo techniques, Overbounding, posterior probability distributions, Probability density function, Probability distribution, Proposals, Rejection sampling, rejection sampling scheme, Sampling methods, Signal processing algorithms, signal sampling, Upper bound

@inproceedings{Martino2009,
  title     = {A Novel Rejection Sampling Scheme for Posterior Probability Distributions},
  author    = {Martino, Luca and Miguez, Joaquin},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4960235},
  doi       = {10.1109/ICASSP.2009.4960235},
  issn      = {1520-6149},
  year      = {2009},
  date      = {2009-01-01},
  booktitle = {2009 IEEE International Conference on Acoustics, Speech and Signal Processing},
  pages     = {2921--2924},
  publisher = {IEEE},
  address   = {Taipei},
  abstract  = {Rejection sampling (RS) is a well-known method to draw from arbitrary target probability distributions, which has important applications by itself or as a building block for more sophisticated Monte Carlo techniques. The main limitation to the use of RS is the need to find an adequate upper bound for the ratio of the target probability density function (pdf) over the proposal pdf from which the samples are generated. There are no general methods to analytically find this bound, except in the particular case in which the target pdf is log-concave. In this paper we adopt a Bayesian view of the problem and propose a general RS scheme to draw from the posterior pdf of a signal of interest using its prior density as a proposal function. The method enables the analytical calculation of the bound and can be applied to a large class of target densities. We illustrate its use with a simple numerical example.},
  keywords  = {Additive noise, arbitrary target probability distributions, Bayes methods, Bayesian methods, Monte Carlo integration, Monte Carlo methods, Monte Carlo techniques, Overbounding, posterior probability distributions, Probability density function, Probability distribution, Proposals, Rejection sampling, rejection sampling scheme, Sampling methods, Signal processing algorithms, signal sampling, Upper bound},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Martino, Luca; Miguez, Joaquin

An Adaptive Accept/Reject Sampling Algorithm for Posterior Probability Distributions Inproceedings

In: 2009 IEEE/SP 15th Workshop on Statistical Signal Processing, pp. 45–48, IEEE, Cardiff, 2009, ISBN: 978-1-4244-2709-3.

Abstract | Links | BibTeX | Tags: adaptive accept/reject sampling, Adaptive rejection sampling, arbitrary target probability distributions, Computer Simulation, Filtering, Monte Carlo integration, Monte Carlo methods, posterior probability distributions, Probability, Probability density function, Probability distribution, Proposals, Rejection sampling, Sampling methods, sensor networks, Signal processing algorithms, signal sampling, Testing

@inproceedings{Martino2009b,
  title     = {An Adaptive Accept/Reject Sampling Algorithm for Posterior Probability Distributions},
  author    = {Luca Martino and Joaquin Miguez},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5278644},
  isbn      = {978-1-4244-2709-3},
  year      = {2009},
  date      = {2009-01-01},
  booktitle = {2009 IEEE/SP 15th Workshop on Statistical Signal Processing},
  pages     = {45--48},
  publisher = {IEEE},
  address   = {Cardiff},
  abstract  = {Accept/reject sampling is a well-known method to generate random samples from arbitrary target probability distributions. It demands the design of a suitable proposal probability density function (pdf) from which candidate samples can be drawn. These samples are either accepted or rejected depending on a test involving the ratio of the target and proposal densities. In this paper we introduce an adaptive method to build a sequence of proposal pdf's that approximate the target density and hence can ensure a high acceptance rate. In order to illustrate the application of the method we design an accept/reject particle filter and then assess its performance and sampling efficiency numerically, by means of computer simulations.},
  keywords  = {adaptive accept/reject sampling, Adaptive rejection sampling, arbitrary target probability distributions, Computer Simulation, Filtering, Monte Carlo integration, Monte Carlo methods, posterior probability distributions, Probability, Probability density function, Probability distribution, Proposals, Rejection sampling, Sampling methods, sensor networks, Signal processing algorithms, signal sampling, Testing},
  pubstate  = {published},
  tppubtype = {inproceedings}
}