2015
Nazabal, Alfredo; Artés-Rodríguez, Antonio
Discriminative spectral learning of hidden Markov models for human activity recognition Proceedings Article
En: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 1966–1970, IEEE, Brisbane, 2015, ISBN: 978-1-4673-6997-8.
Resumen | Enlaces | BibTeX | Etiquetas: Accuracy, Bayesian estimation, classify sequential data, Data models, Databases, Discriminative learning, discriminative spectral learning, Hidden Markov models, HMM parameters, Human activity recognition, learning (artificial intelligence), maximum likelihood, maximum likelihood estimation, ML, moment matching learning technique, Observable operator models, sensors, Spectral algorithm, spectral learning, Speech recognition, Training
@inproceedings{Nazabal2015,
  title     = {Discriminative Spectral Learning of Hidden {Markov} Models for Human Activity Recognition},
  author    = {Nazabal, Alfredo and Art{\'e}s-Rodr{\'\i}guez, Antonio},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7178314},
  doi       = {10.1109/ICASSP.2015.7178314},
  isbn      = {978-1-4673-6997-8},
  year      = {2015},
  month     = apr,
  date      = {2015-04-01},
  booktitle = {2015 {IEEE} International Conference on Acoustics, Speech and Signal Processing ({ICASSP})},
  pages     = {1966--1970},
  publisher = {IEEE},
  address   = {Brisbane},
  abstract  = {Hidden Markov Models (HMMs) are one of the most important techniques to model and classify sequential data. Maximum Likelihood (ML) and (parametric and non-parametric) Bayesian estimation of the HMM parameters suffers from local maxima and in massive datasets they can be specially time consuming. In this paper, we extend the spectral learning of HMMs, a moment matching learning technique free from local maxima, to discriminative HMMs. The resulting method provides the posterior probabilities of the classes without explicitly determining the HMM parameters, and is able to deal with missing labels. We apply the method to Human Activity Recognition (HAR) using two different types of sensors: portable inertial sensors, and fixed, wireless binary sensor networks. Our algorithm outperforms the standard discriminative HMM learning in both complexity and accuracy.},
  keywords  = {Accuracy, Bayesian estimation, classify sequential data, Data models, Databases, Discriminative learning, discriminative spectral learning, Hidden Markov models, HMM parameters, Human activity recognition, learning (artificial intelligence), maximum likelihood, maximum likelihood estimation, ML, moment matching learning technique, Observable operator models, sensors, Spectral algorithm, spectral learning, Speech recognition, Training},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2014
Read, Jesse; Bielza, Concha; Larrañaga, Pedro
Multi-Dimensional Classification with Super-Classes Artículo de revista
En: IEEE Transactions on Knowledge and Data Engineering, vol. 26, no 7, pp. 1720–1733, 2014, ISSN: 1041-4347.
Resumen | Enlaces | BibTeX | Etiquetas: Accuracy, Bayes methods, Classification, COMPRHENSION, conditional dependence, Context, core goals, data instance, evaluation metrics, Integrated circuit modeling, modeling class dependencies, multi-dimensional, Multi-dimensional classification, multidimensional classification problem, multidimensional datasets, multidimensional learners, multilabel classification, multilabel research, multiple class variables, ordinary class, pattern classification, problem transformation, recently-popularized task, super classes, super-class partitions, tractable running time, Training, Vectors
@article{Read2014bb,
  title     = {Multi-Dimensional Classification with Super-Classes},
  author    = {Read, Jesse and Bielza, Concha and Larra{\~n}aga, Pedro},
  url       = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=6648319},
  issn      = {1041-4347},
  year      = {2014},
  date      = {2014-01-01},
  journal   = {IEEE Transactions on Knowledge and Data Engineering},
  volume    = {26},
  number    = {7},
  pages     = {1720--1733},
  publisher = {IEEE},
  abstract  = {The multi-dimensional classification problem is a generalisation of the recently-popularised task of multi-label classification, where each data instance is associated with multiple class variables. There has been relatively little research carried out specific to multi-dimensional classification and, although one of the core goals is similar (modelling dependencies among classes), there are important differences; namely a higher number of possible classifications. In this paper we present method for multi-dimensional classification, drawing from the most relevant multi-label research, and combining it with important novel developments. Using a fast method to model the conditional dependence between class variables, we form super-class partitions and use them to build multi-dimensional learners, learning each super-class as an ordinary class, and thus explicitly modelling class dependencies. Additionally, we present a mechanism to deal with the many class values inherent to super-classes, and thus make learning efficient. To investigate the effectiveness of this approach we carry out an empirical evaluation on a range of multi-dimensional datasets, under different evaluation metrics, and in comparison with high-performing existing multi-dimensional approaches from the literature. Analysis of results shows that our approach offers important performance gains over competing methods, while also exhibiting tractable running time.},
  keywords  = {Accuracy, Bayes methods, Classification, COMPRHENSION, conditional dependence, Context, core goals, data instance, evaluation metrics, Integrated circuit modeling, modeling class dependencies, multi-dimensional, Multi-dimensional classification, multidimensional classification problem, multidimensional datasets, multidimensional learners, multilabel classification, multilabel research, multiple class variables, ordinary class, pattern classification, problem transformation, recently-popularized task, super classes, super-class partitions, tractable running time, Training, Vectors},
  pubstate  = {published},
  tppubtype = {article}
}