2010
Djuric, Petar M; Miguez, Joaquin
Assessment of Nonlinear Dynamic Models by Kolmogorov–Smirnov Statistics Artículo de revista
En: IEEE Transactions on Signal Processing, vol. 58, no 10, pp. 5069–5079, 2010, ISSN: 1053-587X.
Resumen | Enlaces | BibTeX | Etiquetas: Cumulative distributions, discrete random variables, dynamic nonlinear models, Electrical capacitance tomography, Filtering, filtering theory, Iron, Kolmogorov-Smirnov statistics, Kolmogorov–Smirnov statistics, model assessment, nonlinear dynamic models, nonlinear dynamical systems, Permission, predictive cumulative distributions, predictive distributions, Predictive models, Random variables, Robots, statistical analysis, statistical distributions, statistics, Telecommunication control
@article{Djuric2010a,
title = {Assessment of Nonlinear Dynamic Models by {Kolmogorov}--{Smirnov} Statistics},
author = {Djuric, Petar M. and Miguez, Joaquin},
url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5491124},
issn = {1053-587X},
year = {2010},
date = {2010-01-01},
journal = {IEEE Transactions on Signal Processing},
volume = {58},
number = {10},
pages = {5069--5079},
abstract = {Model assessment is a fundamental problem in science and engineering and it addresses the question of the validity of a model in the light of empirical evidence. In this paper, we propose a method for the assessment of dynamic nonlinear models based on empirical and predictive cumulative distributions of data and the Kolmogorov-Smirnov statistics. The technique is based on the generation of discrete random variables that come from a known discrete distribution if the entertained model is correct. We provide simulation examples that demonstrate the performance of the proposed method.},
keywords = {Cumulative distributions, discrete random variables, dynamic nonlinear models, Electrical capacitance tomography, Filtering, filtering theory, Iron, Kolmogorov-Smirnov statistics, Kolmogorov--Smirnov statistics, model assessment, nonlinear dynamic models, nonlinear dynamical systems, Permission, predictive cumulative distributions, predictive distributions, Predictive models, Random variables, Robots, statistical analysis, statistical distributions, statistics, Telecommunication control},
pubstate = {published},
tppubtype = {article},
internal-note = {Fixed broken \textendash control word (was glued to following word), protected proper nouns in title, normalized authors to comma form; keyword list kept as exported from IEEE Xplore — review junk keywords (Iron, Permission, Robots) against local taxonomy},
}
2007
Leiva-Murillo, Jose M; Artés-Rodríguez, Antonio
Maximization of Mutual Information for Supervised Linear Feature Extraction Artículo de revista
En: IEEE Transactions on Neural Networks, vol. 18, no 5, pp. 1433–1441, 2007, ISSN: 1045-9227.
Resumen | Enlaces | BibTeX | Etiquetas: Algorithms, Artificial Intelligence, Automated, component-by-component gradient-ascent method, Computer Simulation, Data Mining, Entropy, Feature extraction, gradient methods, gradient-based entropy, Independent component analysis, Information Storage and Retrieval, information theory, Iron, learning (artificial intelligence), Linear discriminant analysis, Linear Models, Mutual information, Optimization methods, Pattern recognition, Reproducibility of Results, Sensitivity and Specificity, supervised linear feature extraction, Vectors
@article{Leiva-Murillo2007,
title = {Maximization of Mutual Information for Supervised Linear Feature Extraction},
author = {Leiva-Murillo, Jose M. and Art{\'e}s-Rodr{\'\i}guez, Antonio},
url = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=4298118},
issn = {1045-9227},
year = {2007},
date = {2007-01-01},
journal = {IEEE Transactions on Neural Networks},
volume = {18},
number = {5},
pages = {1433--1441},
publisher = {IEEE},
abstract = {In this paper, we present a novel scheme for linear feature extraction in classification. The method is based on the maximization of the mutual information (MI) between the features extracted and the classes. The sum of the MI corresponding to each of the features is taken as an heuristic that approximates the MI of the whole output vector. Then, a component-by-component gradient-ascent method is proposed for the maximization of the MI, similar to the gradient-based entropy optimization used in independent component analysis (ICA). The simulation results show that not only is the method competitive when compared to existing supervised feature extraction methods in all cases studied, but it also remarkably outperform them when the data are characterized by strongly nonlinear boundaries between classes.},
keywords = {Algorithms, Artificial Intelligence, Automated, component-by-component gradient-ascent method, Computer Simulation, Data Mining, Entropy, Feature extraction, gradient methods, gradient-based entropy, Independent component analysis, Information Storage and Retrieval, information theory, Iron, learning (artificial intelligence), Linear discriminant analysis, Linear Models, Mutual information, Optimization methods, Pattern recognition, Reproducibility of Results, Sensitivity and Specificity, supervised linear feature extraction, Vectors},
pubstate = {published},
tppubtype = {article},
internal-note = {Accents normalized to BibTeX special-character form ({\'e}) and dotless i ({\'\i}) for correct sorting; authors converted to unambiguous Last, First form},
}