### 2015

Elvira, Victor; Martino, Luca; Luengo, David; Corander, Jukka

A Gradient Adaptive Population Importance Sampler Artículo en actas

En: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 4075–4079, IEEE, Brisbane, 2015, ISBN: 978-1-4673-6997-8.

Resumen | Enlaces | BibTeX | Etiquetas: adaptive extensions, adaptive importance sampler, Adaptive importance sampling, gradient adaptive population, gradient matrix, Hamiltonian Monte Carlo, Hessian matrices, Hessian matrix, learning (artificial intelligence), Machine learning, MC methods, Monte Carlo, Monte Carlo methods, population Monte Carlo (PMC), proposal densities, Signal processing, Sociology, statistics, target distribution

@inproceedings{Elvira2015a,
  title     = {A Gradient Adaptive Population Importance Sampler},
  author    = {Victor Elvira and Luca Martino and David Luengo and Jukka Corander},
  booktitle = {2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  pages     = {4075--4079},
  publisher = {IEEE},
  address   = {Brisbane},
  year      = {2015},
  date      = {2015-04-01},
  doi       = {10.1109/ICASSP.2015.7178737},
  isbn      = {978-1-4673-6997-8},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7178737 http://www.tsc.uc3m.es/~velvira/papers/ICASSP2015_elvira.pdf},
  abstract  = {Monte Carlo (MC) methods are widely used in signal processing and machine learning. A well-known class of MC methods is composed of importance sampling and its adaptive extensions (e.g., population Monte Carlo). In this paper, we introduce an adaptive importance sampler using a population of proposal densities. The novel algorithm dynamically optimizes the cloud of proposals, adapting them using information about the gradient and Hessian matrix of the target distribution. Moreover, a new kind of interaction in the adaptation of the proposal densities is introduced, establishing a trade-off between attaining a good performance in terms of mean square error and robustness to initialization.},
  keywords  = {adaptive extensions, adaptive importance sampler, Adaptive importance sampling, gradient adaptive population, gradient matrix, Hamiltonian Monte Carlo, Hessian matrices, Hessian matrix, learning (artificial intelligence), Machine learning, MC methods, Monte Carlo, Monte Carlo methods, population Monte Carlo (PMC), proposal densities, Signal processing, Sociology, statistics, target distribution},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Nazabal, Alfredo; Artés-Rodríguez, Antonio

Discriminative spectral learning of hidden markov models for human activity recognition Artículo en actas

En: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 1966–1970, IEEE, Brisbane, 2015, ISBN: 978-1-4673-6997-8.

Resumen | Enlaces | BibTeX | Etiquetas: Accuracy, Bayesian estimation, classify sequential data, Data models, Databases, Discriminative learning, discriminative spectral learning, Hidden Markov models, HMM parameters, Human activity recognition, learning (artificial intelligence), maximum likelihood, maximum likelihood estimation, ML, moment matching learning technique, Observable operator models, sensors, Spectral algorithm, spectral learning, Speech recognition, Training

@inproceedings{Nazabal2015,
  title     = {Discriminative Spectral Learning of Hidden {Markov} Models for Human Activity Recognition},
  author    = {Nazabal, Alfredo and Art{\'e}s-Rodr{\'\i}guez, Antonio},
  booktitle = {2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  pages     = {1966--1970},
  publisher = {IEEE},
  address   = {Brisbane},
  year      = {2015},
  date      = {2015-04-01},
  doi       = {10.1109/ICASSP.2015.7178314},
  isbn      = {978-1-4673-6997-8},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7178314},
  abstract  = {Hidden Markov Models (HMMs) are one of the most important techniques to model and classify sequential data. Maximum Likelihood (ML) and (parametric and non-parametric) Bayesian estimation of the HMM parameters suffers from local maxima and in massive datasets they can be specially time consuming. In this paper, we extend the spectral learning of HMMs, a moment matching learning technique free from local maxima, to discriminative HMMs. The resulting method provides the posterior probabilities of the classes without explicitly determining the HMM parameters, and is able to deal with missing labels. We apply the method to Human Activity Recognition (HAR) using two different types of sensors: portable inertial sensors, and fixed, wireless binary sensor networks. Our algorithm outperforms the standard discriminative HMM learning in both complexity and accuracy.},
  keywords  = {Accuracy, Bayesian estimation, classify sequential data, Data models, Databases, Discriminative learning, discriminative spectral learning, Hidden Markov models, HMM parameters, Human activity recognition, learning (artificial intelligence), maximum likelihood, maximum likelihood estimation, ML, moment matching learning technique, Observable operator models, sensors, Spectral algorithm, spectral learning, Speech recognition, Training},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Martino, Luca; Elvira, Victor; Luengo, David; Artés-Rodríguez, Antonio; Corander, Jukka

Smelly Parallel MCMC Chains Artículo en actas

En: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 4070–4074, IEEE, Brisbane, 2015, ISBN: 978-1-4673-6997-8.

Resumen | Enlaces | BibTeX | Etiquetas: Bayesian inference, learning (artificial intelligence), Machine learning, Markov chain Monte Carlo, Markov chain Monte Carlo algorithms, Markov processes, MC methods, MCMC algorithms, MCMC scheme, mean square error, mean square error methods, Monte Carlo methods, optimisation, parallel and interacting chains, Probability density function, Proposals, robustness, Sampling methods, Signal processing, Signal processing algorithms, signal sampling, smelly parallel chains, smelly parallel MCMC chains, Stochastic optimization

@inproceedings{Martino2015a,
  title     = {Smelly Parallel {MCMC} Chains},
  author    = {Martino, Luca and Elvira, Victor and Luengo, David and Art{\'e}s-Rodr{\'\i}guez, Antonio and Corander, Jukka},
  booktitle = {2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  pages     = {4070--4074},
  publisher = {IEEE},
  address   = {Brisbane},
  year      = {2015},
  date      = {2015-04-01},
  doi       = {10.1109/ICASSP.2015.7178736},
  isbn      = {978-1-4673-6997-8},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7178736 http://www.tsc.uc3m.es/~velvira/papers/ICASSP2015_martino.pdf},
  abstract  = {Monte Carlo (MC) methods are useful tools for Bayesian inference and stochastic optimization that have been widely applied in signal processing and machine learning. A well-known class of MC methods are Markov Chain Monte Carlo (MCMC) algorithms. In this work, we introduce a novel parallel interacting MCMC scheme, where the parallel chains share information, thus yielding a faster exploration of the state space. The interaction is carried out generating a dynamic repulsion among the “smelly” parallel chains that takes into account the entire population of current states. The ergodicity of the scheme and its relationship with other sampling methods are discussed. Numerical results show the advantages of the proposed approach in terms of mean square error, robustness w.r.t. to initial values and parameter choice.},
  keywords  = {Bayesian inference, learning (artificial intelligence), Machine learning, Markov chain Monte Carlo, Markov chain Monte Carlo algorithms, Markov processes, MC methods, MCMC algorithms, MCMC scheme, mean square error, mean square error methods, Monte Carlo methods, optimisation, parallel and interacting chains, Probability density function, Proposals, robustness, Sampling methods, Signal processing, Signal processing algorithms, signal sampling, smelly parallel chains, smelly parallel MCMC chains, Stochastic optimization},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2013

Luengo, David; Via, Javier; Monzon, Sandra; Trigano, Tom; Artés-Rodríguez, Antonio

Cross-Products LASSO Artículo en actas

En: 2013 IEEE International Conference on Acoustics, Speech and Signal Processing, pp. 6118–6122, IEEE, Vancouver, 2013, ISSN: 1520-6149.

Resumen | Enlaces | BibTeX | Etiquetas: Approximation methods, approximation theory, concave programming, convex programming, Cost function, cross-product LASSO cost function, Dictionaries, dictionary, Encoding, LASSO, learning (artificial intelligence), negative co-occurrence, negative cooccurrence phenomenon, nonconvex optimization problem, Signal processing, signal processing application, signal reconstruction, sparse coding, sparse learning approach, Sparse matrices, sparsity-aware learning, successive convex approximation, Vectors

@inproceedings{Luengo2013,
  title     = {Cross-Products {LASSO}},
  author    = {Luengo, David and Via, Javier and Monzon, Sandra and Trigano, Tom and Art{\'e}s-Rodr{\'\i}guez, Antonio},
  booktitle = {2013 IEEE International Conference on Acoustics, Speech and Signal Processing},
  pages     = {6118--6122},
  publisher = {IEEE},
  address   = {Vancouver},
  year      = {2013},
  date      = {2013-01-01},
  issn      = {1520-6149},
  url       = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=6638840},
  abstract  = {Negative co-occurrence is a common phenomenon in many signal processing applications. In some cases the signals involved are sparse, and this information can be exploited to recover them. In this paper, we present a sparse learning approach that explicitly takes into account negative co-occurrence. This is achieved by adding a novel penalty term to the LASSO cost function based on the cross-products between the reconstruction coefficients. Although the resulting optimization problem is non-convex, we develop a new and efficient method for solving it based on successive convex approximations. Results on synthetic data, for both complete and overcomplete dictionaries, are provided to validate the proposed approach.},
  keywords  = {Approximation methods, approximation theory, concave programming, convex programming, Cost function, cross-product LASSO cost function, Dictionaries, dictionary, Encoding, LASSO, learning (artificial intelligence), negative co-occurrence, negative cooccurrence phenomenon, nonconvex optimization problem, Signal processing, signal processing application, signal reconstruction, sparse coding, sparse learning approach, Sparse matrices, sparsity-aware learning, successive convex approximation, Vectors},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2012

O'Mahony, Niamh; Perez-Cruz, Fernando

A novel Sequential Bayesian Approach to GPS Acquisition Artículo en actas

En: 2012 3rd International Workshop on Cognitive Information Processing (CIP), pp. 1–6, IEEE, Baiona, 2012, ISBN: 978-1-4673-1878-5.

Resumen | Enlaces | BibTeX | Etiquetas: Bayes methods, coarse synchronization, Correlators, data acquisition, Delay, Doppler effect, Global Positioning System, GPS acquisition, GPS signal parameters, learning (artificial intelligence), online learning algorithm, Receivers, Satellites, sequential Bayesian approach, signal acquisition, signal detection, Synchronization

@inproceedings{O'Mahony2012,
  title     = {A Novel Sequential {Bayesian} Approach to {GPS} Acquisition},
  author    = {O'Mahony, Niamh and Perez-Cruz, Fernando},
  booktitle = {2012 3rd International Workshop on Cognitive Information Processing (CIP)},
  pages     = {1--6},
  publisher = {IEEE},
  address   = {Baiona},
  year      = {2012},
  date      = {2012-01-01},
  isbn      = {978-1-4673-1878-5},
  url       = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=6232921},
  abstract  = {In this work, a novel online learning algorithm is presented for the synchronization of Global Positioning System (GPS) signal parameters at the acquisition, or coarse synchronization, stage. The algorithm is based on a Bayesian approach, which has, to date, not been exploited for the acquisition problem. Simulated results are presented to illustrate the algorithm performance, in terms of accuracy and acquisition time, along with results from the acquisition of signals from live GPS satellites using both the new algorithm and a state-of-the-art approach for comparison.},
  keywords  = {Bayes methods, coarse synchronization, Correlators, data acquisition, Delay, Doppler effect, Global Positioning System, GPS acquisition, GPS signal parameters, learning (artificial intelligence), online learning algorithm, Receivers, Satellites, sequential Bayesian approach, signal acquisition, signal detection, Synchronization},
  pubstate  = {published},
  tppubtype = {inproceedings}
}