2021
Sevilla-Salcedo, Carlos; Gómez-Verdejo, Vanessa; Olmos, Pablo M
Sparse semi-supervised heterogeneous interbattery bayesian analysis Artículo de revista
En: Pattern Recognition, vol. 120, pp. 108141, 2021, ISSN: 0031-3203.
Resumen | Enlaces | BibTeX | Etiquetas: Bayesian model, Canonical correlation analysis, Factor analysis, Feature selection, Multi-task, Principal component analysis, Semi-supervised
@article{SEVILLASALCEDO2021108141,
  title     = {Sparse Semi-Supervised Heterogeneous Interbattery {Bayesian} Analysis},
  author    = {Carlos Sevilla-Salcedo and Vanessa G{\'o}mez-Verdejo and Pablo M Olmos},
  url       = {https://www.sciencedirect.com/science/article/pii/S0031320321003289},
  doi       = {10.1016/j.patcog.2021.108141},
  issn      = {0031-3203},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  journal   = {Pattern Recognition},
  volume    = {120},
  pages     = {108141},
  abstract  = {The Bayesian approach to feature extraction, known as factor analysis (FA), has been widely studied in machine learning to obtain a latent representation of the data. An adequate selection of the probabilities and priors of these bayesian models allows the model to better adapt to the data nature (i.e. heterogeneity, sparsity), obtaining a more representative latent space. The objective of this article is to propose a general FA framework capable of modelling any problem. To do so, we start from the Bayesian Inter-Battery Factor Analysis (BIBFA) model, enhancing it with new functionalities to be able to work with heterogeneous data, to include feature selection, and to handle missing values as well as semi-supervised problems. The performance of the proposed model, Sparse Semi-supervised Heterogeneous Interbattery Bayesian Analysis (SSHIBA), has been tested on different scenarios to evaluate each one of its novelties, showing not only a great versatility and an interpretability gain, but also outperforming most of the state-of-the-art algorithms.},
  keywords  = {Bayesian model, Canonical correlation analysis, Factor analysis, Feature selection, Multi-task, Principal component analysis, Semi-supervised},
  pubstate  = {published},
  tppubtype = {article}
}
2016
Song, Yang; Schreier, Peter J; Ramírez, David; Hasija, Tanuj
Canonical Correlation Analysis of High-Dimensional Data With Very Small Sample Support Artículo de revista
En: Signal Processing, vol. 128, pp. 449–458, 2016, ISSN: 01651684.
Resumen | Enlaces | BibTeX | Etiquetas: Bartlett-Lawley statistic, Canonical correlation analysis, Journal, Model-order selection, Principal component analysis, Small sample support
@article{Song2016,
  title     = {Canonical Correlation Analysis of High-Dimensional Data With Very Small Sample Support},
  author    = {Yang Song and Peter J Schreier and David Ram{\'\i}rez and Tanuj Hasija},
  url       = {http://www.sciencedirect.com/science/article/pii/S0165168416300834},
  doi       = {10.1016/j.sigpro.2016.05.020},
  issn      = {0165-1684},
  year      = {2016},
  date      = {2016-11-01},
  journal   = {Signal Processing},
  volume    = {128},
  pages     = {449--458},
  abstract  = {This paper is concerned with the analysis of correlation between two high-dimensional data sets when there are only few correlated signal components but the number of samples is very small, possibly much smaller than the dimensions of the data. In such a scenario, a principal component analysis (PCA) rank-reduction preprocessing step is commonly performed before applying canonical correlation analysis (CCA). We present simple, yet very effective, approaches to the joint model-order selection of the number of dimensions that should be retained through the PCA step and the number of correlated signals. These approaches are based on reduced-rank versions of the Bartlett\textendash{}Lawley hypothesis test and the minimum description length information-theoretic criterion. Simulation results show that the techniques perform well for very small sample sizes even in colored noise.},
  keywords  = {Bartlett-Lawley statistic, Canonical correlation analysis, Journal, Model-order selection, Principal component analysis, Small sample support},
  pubstate  = {published},
  tppubtype = {article}
}
2013
Vazquez, Manuel A; Jin, Jing; Dauwels, Justin; Vialatte, Francois B
Automated Detection of Paroxysmal Gamma Waves in Meditation EEG Proceedings Article
En: 2013 IEEE International Conference on Acoustics, Speech and Signal Processing, pp. 1192–1196, IEEE, Vancouver, 2013, ISSN: 1520-6149.
Resumen | Enlaces | BibTeX | Etiquetas: automated detection, Bhramari Pranayama, Blind source separation, brain active region, brain multiple source identification, Detectors, EEG activity, Electroencephalogram, Electroencephalography, left temporal lobe, medical signal detection, Meditation, meditation EEG, meditator, neurophysiology, neuroscience, Paroxysmal gamma wave, paroxysmal gamma waves, PGW, Principal component analysis, Sensitivity, signal processing community, Spike detection, Temporal lobe, yoga type meditation
@inproceedings{Vazquez2013,
  title     = {Automated Detection of Paroxysmal Gamma Waves in Meditation {EEG}},
  author    = {Manuel A Vazquez and Jing Jin and Justin Dauwels and Francois B Vialatte},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6637839},
  issn      = {1520-6149},
  year      = {2013},
  date      = {2013-01-01},
  booktitle = {2013 {IEEE} International Conference on Acoustics, Speech and Signal Processing},
  pages     = {1192--1196},
  publisher = {IEEE},
  address   = {Vancouver},
  abstract  = {Meditation is a fascinating topic, yet has received limited attention in the neuroscience and signal processing community so far. A few studies have investigated electroencephalograms (EEG) recorded during meditation. Strong EEG activity has been observed in the left temporal lobe of meditators. Meditators exhibit more paroxysmal gamma waves (PGWs) in active regions of the brain. In this paper, a method is proposed to automatically detect PGWs from meditation EEG. The proposed algorithm is able to identify multiple sources in the brain that generate PGWs, and the sources associated with different types of PGWs can be distinguished. The effectiveness of the proposed method is assessed on 3 subjects possessing different degrees of expertise in practicing a yoga type meditation known as Bhramari Pranayama.},
  keywords  = {automated detection, Bhramari Pranayama, Blind source separation, brain active region, brain multiple source identification, Detectors, EEG activity, Electroencephalogram, Electroencephalography, left temporal lobe, medical signal detection, Meditation, meditation EEG, meditator, neurophysiology, neuroscience, Paroxysmal gamma wave, paroxysmal gamma waves, PGW, Principal component analysis, Sensitivity, signal processing community, Spike detection, Temporal lobe, yoga type meditation},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2012
Garcia-Moreno, Pablo; Artés-Rodríguez, Antonio; Hansen, Lars Kai
A Hold-out Method to Correct PCA Variance Inflation Proceedings Article
En: 2012 3rd International Workshop on Cognitive Information Processing (CIP), pp. 1–6, IEEE, Baiona, 2012, ISBN: 978-1-4673-1878-5.
Resumen | Enlaces | BibTeX | Etiquetas: Approximation methods, classification scenario, computational complexity, computational cost, Computational efficiency, correction method, hold-out method, hold-out procedure, leave-one-out procedure, LOO method, LOO procedure, Mathematical model, PCA algorithm, PCA variance inflation, Principal component analysis, singular value decomposition, Standards, SVD, Training
@inproceedings{Garcia-Moreno2012,
  title     = {A Hold-out Method to Correct {PCA} Variance Inflation},
  author    = {Pablo Garcia-Moreno and Antonio Art{\'e}s-Rodr{\'\i}guez and Lars Kai Hansen},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6232926},
  isbn      = {978-1-4673-1878-5},
  year      = {2012},
  date      = {2012-01-01},
  booktitle = {2012 3rd International Workshop on Cognitive Information Processing ({CIP})},
  pages     = {1--6},
  publisher = {IEEE},
  address   = {Baiona},
  abstract  = {In this paper we analyze the problem of variance inflation experienced by the PCA algorithm when working in an ill-posed scenario where the dimensionality of the training set is larger than its sample size. In an earlier article a correction method based on a Leave-One-Out (LOO) procedure was introduced. We propose a Hold-out procedure whose computational cost is lower and, unlike the LOO method, the number of SVD's does not scale with the sample size. We analyze its properties from a theoretical and empirical point of view. Finally we apply it to a real classification scenario.},
  keywords  = {Approximation methods, classification scenario, computational complexity, computational cost, Computational efficiency, correction method, hold-out method, hold-out procedure, leave-one-out procedure, LOO method, LOO procedure, Mathematical model, PCA algorithm, PCA variance inflation, Principal component analysis, singular value decomposition, Standards, SVD, Training},
  pubstate  = {published},
  tppubtype = {inproceedings}
}