2015
Nazabal, Alfredo; Artés-Rodríguez, Antonio
Discriminative spectral learning of hidden Markov models for human activity recognition Proceedings Article
In: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 1966–1970, IEEE, Brisbane, 2015, ISBN: 978-1-4673-6997-8.
Abstract | Links | BibTeX | Tags: Accuracy, Bayesian estimation, classify sequential data, Data models, Databases, Discriminative learning, discriminative spectral learning, Hidden Markov models, HMM parameters, Human activity recognition, learning (artificial intelligence), maximum likelihood, maximum likelihood estimation, ML, moment matching learning technique, Observable operator models, sensors, Spectral algorithm, spectral learning, Speech recognition, Training
@inproceedings{Nazabal2015,
title = {Discriminative spectral learning of hidden Markov models for human activity recognition},
author = {Alfredo Nazabal and Antonio Art\'{e}s-Rodr\'{i}guez},
url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7178314},
doi = {10.1109/ICASSP.2015.7178314},
isbn = {978-1-4673-6997-8},
year = {2015},
date = {2015-04-01},
booktitle = {2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
pages = {1966--1970},
publisher = {IEEE},
address = {Brisbane},
abstract = {Hidden Markov Models (HMMs) are one of the most important techniques to model and classify sequential data. Maximum Likelihood (ML) and (parametric and non-parametric) Bayesian estimation of the HMM parameters suffer from local maxima, and on massive datasets they can be especially time consuming. In this paper, we extend the spectral learning of HMMs, a moment matching learning technique free from local maxima, to discriminative HMMs. The resulting method provides the posterior probabilities of the classes without explicitly determining the HMM parameters, and is able to deal with missing labels. We apply the method to Human Activity Recognition (HAR) using two different types of sensors: portable inertial sensors, and fixed, wireless binary sensor networks. Our algorithm outperforms the standard discriminative HMM learning in both complexity and accuracy.},
keywords = {Accuracy, Bayesian estimation, classify sequential data, Data models, Databases, Discriminative learning, discriminative spectral learning, Hidden Markov models, HMM parameters, Human activity recognition, learning (artificial intelligence), maximum likelihood, maximum likelihood estimation, ML, moment matching learning technique, Observable operator models, sensors, Spectral algorithm, spectral learning, Speech recognition, Training},
pubstate = {published},
tppubtype = {inproceedings}
}
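The abstract above describes a moment-matching (spectral) estimator for HMMs, extended to a discriminative setting that returns class posteriors without recovering the HMM parameters. As a rough illustration of the underlying spectral idea only, the Python sketch below implements the classical observable-operator estimator of Hsu, Kakade and Zhang (2009) for a single HMM; the function names and the simple triple-counting moment estimates are illustrative assumptions, and the paper's discriminative variant (class posteriors, missing labels) is not reproduced here.

import numpy as np

def spectral_hmm(triples, n_symbols, rank):
    """Spectral (moment-matching) estimation of HMM observable operators
    from observation triples (x1, x2, x3). Generic sketch only."""
    # Empirical moments: P1[i] = P(x1=i), P21[j, i] = P(x2=j, x1=i),
    # P3x1[x, k, i] = P(x3=k, x2=x, x1=i)
    P1 = np.zeros(n_symbols)
    P21 = np.zeros((n_symbols, n_symbols))
    P3x1 = np.zeros((n_symbols, n_symbols, n_symbols))
    for x1, x2, x3 in triples:
        P1[x1] += 1
        P21[x2, x1] += 1
        P3x1[x2, x3, x1] += 1
    n = len(triples)
    P1, P21, P3x1 = P1 / n, P21 / n, P3x1 / n

    # Rank-m subspace from the SVD of P21
    U = np.linalg.svd(P21)[0][:, :rank]

    # Observable operators B_x = (U^T P_{3,x,1}) (U^T P_{2,1})^+
    pinv = np.linalg.pinv(U.T @ P21)
    B = {x: U.T @ P3x1[x] @ pinv for x in range(n_symbols)}
    b1 = U.T @ P1                          # initial weight vector
    binf = np.linalg.pinv(P21.T @ U) @ P1  # normalization vector
    return b1, binf, B

def sequence_score(seq, b1, binf, B):
    """Estimated sequence probability: binf^T B_{xT} ... B_{x1} b1."""
    state = b1
    for x in seq:
        state = B[x] @ state
    return float(binf @ state)

A discriminative use in the spirit of the abstract would fit one set of operators per activity class and normalize the per-class sequence scores into posterior probabilities; the paper's construction incorporates the labels directly into the learning and handles missing ones, which this sketch does not.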
2009
Goez, Roger; Lazaro, Marcelino
Training of Neural Classifiers by Separating Distributions at the Hidden Layer Proceedings Article
In: 2009 IEEE International Workshop on Machine Learning for Signal Processing, pp. 1–6, IEEE, Grenoble, 2009, ISBN: 978-1-4244-4947-7.
Abstract | Links | BibTeX | Tags: Artificial neural networks, Bayesian methods, Cost function, Curve fitting, Databases, Function approximation, Neural networks, Speech recognition, Support vector machine classification, Support vector machines
@inproceedings{Goez2009,
title = {Training of Neural Classifiers by Separating Distributions at the Hidden Layer},
author = {Roger Goez and Marcelino Lazaro},
url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5306240},
isbn = {978-1-4244-4947-7},
year = {2009},
date = {2009-01-01},
booktitle = {2009 IEEE International Workshop on Machine Learning for Signal Processing},
pages = {1--6},
publisher = {IEEE},
address = {Grenoble},
abstract = {A new cost function for training binary classifiers based on neural networks is proposed. This cost function aims at separating the distributions of the patterns of each class at the output of the hidden layer of the network. It has been implemented in a Generalized Radial Basis Function (GRBF) network and its performance has been evaluated on three different databases, showing advantages with respect to the conventional Mean Squared Error (MSE) cost function. With respect to the Support Vector Machine (SVM) classifier, the proposed method also has advantages, both in terms of performance and complexity.},
keywords = {Artificial neural networks, Bayesian methods, Cost function, Curve fitting, Databases, Function approximation, Neural networks, Speech recognition, Support vector machine classification, Support vector machines},
pubstate = {published},
tppubtype = {inproceedings}
}
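The abstract above proposes training a GRBF binary classifier with a cost that separates the two class-conditional distributions of the hidden-layer activations. The paper's cost function is not reproduced here; as a loosely related Python sketch under that assumption, the code below adapts Gaussian RBF centers and width by gradient descent on a Fisher-discriminant-style separation surrogate (an illustrative stand-in, not the authors' criterion).

import torch

def rbf_hidden(x, centers, gamma):
    """Gaussian RBF hidden layer: phi_j(x) = exp(-gamma * ||x - c_j||^2)."""
    return torch.exp(-gamma * torch.cdist(x, centers) ** 2)

def separation_cost(h, y):
    """Push the class-conditional means of the hidden activations apart
    relative to the within-class scatter (Fisher-style surrogate)."""
    h0, h1 = h[y == 0], h[y == 1]
    between = ((h0.mean(0) - h1.mean(0)) ** 2).sum()
    within = h0.var(0).sum() + h1.var(0).sum()
    return -between / (within + 1e-8)

# Hypothetical usage on synthetic 2-D data: adapt centers and width by gradient descent
torch.manual_seed(0)
x = torch.randn(200, 2)
y = (x[:, 0] + x[:, 1] > 0).long()
centers = torch.randn(10, 2, requires_grad=True)
log_gamma = torch.zeros(1, requires_grad=True)
opt = torch.optim.Adam([centers, log_gamma], lr=0.05)
for _ in range(100):
    opt.zero_grad()
    h = rbf_hidden(x, centers, log_gamma.exp())
    separation_cost(h, y).backward()
    opt.step()

In the paper's setting, an output layer on top of the separated hidden activations would then produce the final binary decision; this sketch stops at the hidden-layer training step.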