### 2020

Romero-Medrano, Lorena; Moreno-Muñoz, P; Artés-Rodríguez, Antonio

Multinomial Sampling for Hierarchical Change-Point Detection Inproceedings

In: 2020 IEEE International Workshop on Machine Learning for Signal Processing, 2020.

BibTeX | Tags: Bayesian inference, change-point detection (CPD), latent variable models, multinomial likelihoods

@inproceedings{AArtes20g,
  title     = {Multinomial Sampling for Hierarchical Change-Point Detection},
  author    = {Romero-Medrano, Lorena and Moreno-Muñoz, P. and Artés-Rodríguez, Antonio},
  booktitle = {2020 IEEE International Workshop on Machine Learning for Signal Processing},
  year      = {2020},
  date      = {2020-09-21},
  keywords  = {Bayesian inference, change-point detection (CPD), latent variable models, multinomial likelihoods},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2018

Martino, Luca; Elvira, Victor; Miguez, Joaquín; Artés-Rodríguez, Antonio; Djuric, Petar M

A Comparison Of Clipping Strategies For Importance Sampling Inproceedings

In: 2018 IEEE Statistical Signal Processing Workshop (SSP), 2018.

Links | BibTeX | Tags: Bayesian inference, Importance sampling, Monte Carlo methods, Parameter estimation, Variance Reduction methods

@inproceedings{JMiguez18d,
  title     = {A Comparison Of Clipping Strategies For Importance Sampling},
  author    = {Martino, Luca and Elvira, Victor and Miguez, Joaquín and Artés-Rodríguez, Antonio and Djuric, Petar M.},
  booktitle = {2018 IEEE Statistical Signal Processing Workshop (SSP)},
  doi       = {10.1109/SSP.2018.8450722},
  year      = {2018},
  date      = {2018-06-10},
  keywords  = {Bayesian inference, Importance sampling, Monte Carlo methods, Parameter estimation, Variance Reduction methods},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2015

Martino, Luca; Elvira, Victor; Luengo, David; Corander, Jukka

Parallel interacting Markov adaptive importance sampling Inproceedings

In: 2015 23rd European Signal Processing Conference (EUSIPCO), pp. 499–503, IEEE, Nice, 2015, ISBN: 978-0-9928-6263-3.

Abstract | Links | BibTeX | Tags: Adaptive importance sampling, Bayesian inference, MCMC methods, Monte Carlo methods, Parallel Chains, Probability density function, Proposals, Signal processing, Signal processing algorithms, Sociology

@inproceedings{Martino2015bb,
  title     = {Parallel interacting {Markov} adaptive importance sampling},
  author    = {Martino, Luca and Elvira, Victor and Luengo, David and Corander, Jukka},
  booktitle = {2015 23rd European Signal Processing Conference (EUSIPCO)},
  url       = {http://www.eurasip.org/Proceedings/Eusipco/Eusipco2015/papers/1570111267.pdf},
  doi       = {10.1109/EUSIPCO.2015.7362433},
  isbn      = {978-0-9928-6263-3},
  year      = {2015},
  date      = {2015-08-01},
  pages     = {499--503},
  publisher = {IEEE},
  address   = {Nice},
  abstract  = {Monte Carlo (MC) methods are widely used for statistical inference in signal processing applications. A well-known class of MC methods is importance sampling (IS) and its adaptive extensions. In this work, we introduce an iterated importance sampler using a population of proposal densities, which are adapted according to an MCMC technique over the population of location parameters. The novel algorithm provides a global estimation of the variables of interest iteratively, using all the samples weighted according to the deterministic mixture scheme. Numerical results, on a multi-modal example and a localization problem in wireless sensor networks, show the advantages of the proposed schemes.},
  keywords  = {Adaptive importance sampling, Bayesian inference, MCMC methods, Monte Carlo methods, Parallel Chains, Probability density function, Proposals, Signal processing, Signal processing algorithms, Sociology},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Martino, Luca; Elvira, Victor; Luengo, David; Artés-Rodríguez, Antonio; Corander, Jukka

Smelly Parallel MCMC Chains Inproceedings

In: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 4070–4074, IEEE, Brisbane, 2015, ISBN: 978-1-4673-6997-8.

Abstract | Links | BibTeX | Tags: Bayesian inference, learning (artificial intelligence), Machine learning, Markov chain Monte Carlo, Markov chain Monte Carlo algorithms, Markov processes, MC methods, MCMC algorithms, MCMC scheme, mean square error, mean square error methods, Monte Carlo methods, optimisation, parallel and interacting chains, Probability density function, Proposals, robustness, Sampling methods, Signal processing, Signal processing algorithms, signal sampling, smelly parallel chains, smelly parallel MCMC chains, Stochastic optimization

@inproceedings{Martino2015a,
  title     = {Smelly Parallel {MCMC} Chains},
  author    = {Martino, Luca and Elvira, Victor and Luengo, David and Artés-Rodríguez, Antonio and Corander, Jukka},
  booktitle = {2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  url       = {http://www.tsc.uc3m.es/~velvira/papers/ICASSP2015_martino.pdf},
  doi       = {10.1109/ICASSP.2015.7178736},
  isbn      = {978-1-4673-6997-8},
  year      = {2015},
  date      = {2015-04-01},
  pages     = {4070--4074},
  publisher = {IEEE},
  address   = {Brisbane},
  abstract  = {Monte Carlo (MC) methods are useful tools for Bayesian inference and stochastic optimization that have been widely applied in signal processing and machine learning. A well-known class of MC methods are Markov Chain Monte Carlo (MCMC) algorithms. In this work, we introduce a novel parallel interacting MCMC scheme, where the parallel chains share information, thus yielding a faster exploration of the state space. The interaction is carried out generating a dynamic repulsion among the “smelly” parallel chains that takes into account the entire population of current states. The ergodicity of the scheme and its relationship with other sampling methods are discussed. Numerical results show the advantages of the proposed approach in terms of mean square error, robustness w.r.t. to initial values and parameter choice.},
  keywords  = {Bayesian inference, learning (artificial intelligence), Machine learning, Markov chain Monte Carlo, Markov chain Monte Carlo algorithms, Markov processes, MC methods, MCMC algorithms, MCMC scheme, mean square error, mean square error methods, Monte Carlo methods, optimisation, parallel and interacting chains, Probability density function, Proposals, robustness, Sampling methods, Signal processing, Signal processing algorithms, signal sampling, smelly parallel chains, smelly parallel MCMC chains, Stochastic optimization},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2014

Martino, Luca; Elvira, Víctor; Luengo, David; Artés-Rodríguez, Antonio; Corander, Jukka

Orthogonal MCMC Algorithms Inproceedings

In: 2014 IEEE Workshop on Statistical Signal Processing (SSP 2014), Gold Coast, 2014.

Abstract | Links | BibTeX | Tags: Bayesian inference, Markov Chain Monte Carlo (MCMC), Parallel Chains, population Monte Carlo

@inproceedings{Martino2014b,
  title     = {Orthogonal {MCMC} Algorithms},
  author    = {Martino, Luca and Elvira, Víctor and Luengo, David and Artés-Rodríguez, Antonio and Corander, Jukka},
  booktitle = {2014 IEEE Workshop on Statistical Signal Processing (SSP 2014)},
  url       = {http://edas.info/p15153#S1569490857},
  year      = {2014},
  date      = {2014-01-01},
  address   = {Gold Coast},
  abstract  = {Monte Carlo (MC) methods are widely used in signal processing, machine learning and stochastic optimization. A wellknown class of MC methods are Markov Chain Monte Carlo (MCMC) algorithms. In this work, we introduce a novel parallel interacting MCMC scheme, where the parallel chains share information using another MCMC technique working on the entire population of current states. These parallel “vertical” chains are led by random-walk proposals, whereas the “horizontal” MCMC uses a independent proposal, which can be easily adapted by making use of all the generated samples. Numerical results show the advantages of the proposed sampling scheme in terms of mean absolute error, as well as robustness w.r.t. to initial values and parameter choice.},
  keywords  = {Bayesian inference, Markov Chain Monte Carlo (MCMC), Parallel Chains, population Monte Carlo},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2013

Read, Jesse; Martino, Luca; Luengo, David

Efficient Monte Carlo Optimization for Multi-Label Classifier Chains Inproceedings

In: ICASSP 2013: The 38th International Conference on Acoustics, Speech, and Signal Processing, Vancouver, 2013.

Abstract | BibTeX | Tags: Bayesian inference, Classifier chains, Monte Carlo methods, Multi-dimensional classification, Multi-label classification

@inproceedings{Read2013,
  title     = {Efficient {Monte Carlo} Optimization for Multi-Label Classifier Chains},
  author    = {Read, Jesse and Martino, Luca and Luengo, David},
  booktitle = {ICASSP 2013: The 38th International Conference on Acoustics, Speech, and Signal Processing},
  year      = {2013},
  date      = {2013-01-01},
  address   = {Vancouver},
  abstract  = {Multi-dimensional classification (MDC) is the supervised learning problem where an instance is associated with multiple classes, rather than with a single class, as in traditional classification problems. Since these classes are often strongly correlated, modeling the dependencies between them allows MDC methods to improve their performance at the expense of an increased computational cost. In this paper we focus on the classifier chains (CC) approach for modeling dependencies, one of the most popular and highest-performing methods for multi-label classification (MLC), a particular case of MDC which involves only binary classes (i.e., labels). The original CC algorithm makes a greedy approximation, and is fast but tends to propagate errors along the chain. Here we present novel Monte Carlo schemes, both for finding a good chain sequence and performing efficient inference. Our algorithms remain tractable for high-dimensional data sets and obtain the best predictive performance across several real data sets.},
  keywords  = {Bayesian inference, Classifier chains, Monte Carlo methods, Multi-dimensional classification, Multi-label classification},
  pubstate  = {published},
  tppubtype = {inproceedings}
}