## 2016 |

## Journal Articles |

Vazquez-Vilar, Gonzalo; Tauste Campo, Adria; Guillen i Fabregas, Albert; Martinez, Alfonso Bayesian M-Ary Hypothesis Testing: The Meta-Converse and Verdú-Han Bounds Are Tight (Journal Article) IEEE Transactions on Information Theory, 62 (5), pp. 2324–2333, 2016, ISSN: 0018-9448. (Abstract | Links | BibTeX | Tags: Bayes methods, Channel Coding, Electronic mail, error probability, Journal, Random variables, Testing) @article{Vazquez-Vilar2016, title = {Bayesian M-Ary Hypothesis Testing: The Meta-Converse and Verdú-Han Bounds Are Tight}, author = {Vazquez-Vilar, Gonzalo and Tauste Campo, Adria and Guillen i Fabregas, Albert and Martinez, Alfonso}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7434042}, doi = {10.1109/TIT.2016.2542080}, issn = {0018-9448}, year = {2016}, date = {2016-05-01}, journal = {IEEE Transactions on Information Theory}, volume = {62}, number = {5}, pages = {2324--2333}, abstract = {Two alternative exact characterizations of the minimum error probability of Bayesian M-ary hypothesis testing are derived. The first expression corresponds to the error probability of an induced binary hypothesis test and implies the tightness of the meta-converse bound by Polyanskiy et al.; the second expression is a function of an information-spectrum measure and implies the tightness of a generalized Verdú-Han lower bound. The formulas characterize the minimum error probability of several problems in information theory and help to identify the steps where existing converse bounds are loose.}, keywords = {Bayes methods, Channel Coding, Electronic mail, error probability, Journal, Random variables, Testing}, pubstate = {published}, tppubtype = {article} } Two alternative exact characterizations of the minimum error probability of Bayesian M-ary hypothesis testing are derived. 
The first expression corresponds to the error probability of an induced binary hypothesis test and implies the tightness of the meta-converse bound by Polyanskiy et al.; the second expression is a function of an information-spectrum measure and implies the tightness of a generalized Verdú-Han lower bound. The formulas characterize the minimum error probability of several problems in information theory and help to identify the steps where existing converse bounds are loose. |

## 2015 |

## Journal Articles |

Ramírez, David; Schreier, Peter; Via, Javier; Santamaria, Ignacio; Scharf, Louis Detection of Multivariate Cyclostationarity (Journal Article) IEEE Transactions on Signal Processing, 63 (20), pp. 5395–5408, 2015, ISSN: 1053-587X. (Abstract | Links | BibTeX | Tags: ad hoc function, asymptotic GLRT, asymptotic LMPIT, block circulant, block-Toeplitz structure, Correlation, covariance matrices, Covariance matrix, covariance structure, cycle period, cyclic spectrum, Cyclostationarity, Detectors, Frequency-domain analysis, generalized likelihood ratio test, generalized likelihood ratio test (GLRT), hypothesis testing problem, locally most powerful invariant test, locally most powerful invariant test (LMPIT), Loève spectrum, maximum likelihood estimation, multivariate cyclostationarity detection, power spectral density, random processes, scalar valued CS time series, signal detection, spectral analysis, statistical testing, Testing, Time series, Time series analysis, Toeplitz matrices, Toeplitz matrix, vector valued random process cyclostationary, vector valued WSS time series, wide sense stationary, Wijsman theorem, Wijsman's theorem) @article{Ramirez2015, title = {Detection of Multivariate Cyclostationarity}, author = {Ramírez, David and Schreier, Peter J. and Via, Javier and Santamaria, Ignacio and Scharf, Louis L.}, url = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=7134806}, doi = {10.1109/TSP.2015.2450201}, issn = {1053-587X}, year = {2015}, date = {2015-10-01}, journal = {IEEE Transactions on Signal Processing}, volume = {63}, number = {20}, pages = {5395--5408}, publisher = {IEEE}, abstract = {This paper derives an asymptotic generalized likelihood ratio test (GLRT) and an asymptotic locally most powerful invariant test (LMPIT) for two hypothesis testing problems: 1) Is a vector-valued random process cyclostationary (CS) or is it wide-sense stationary (WSS)? 2) Is a vector-valued random process CS or is it nonstationary? 
Our approach uses the relationship between a scalar-valued CS time series and a vector-valued WSS time series for which the knowledge of the cycle period is required. This relationship allows us to formulate the problem as a test for the covariance structure of the observations. The covariance matrix of the observations has a block-Toeplitz structure for CS and WSS processes. By considering the asymptotic case where the covariance matrix becomes block-circulant we are able to derive its maximum likelihood (ML) estimate and thus an asymptotic GLRT. Moreover, using Wijsman's theorem, we also obtain an asymptotic LMPIT. These detectors may be expressed in terms of the Loève spectrum, the cyclic spectrum, and the power spectral density, establishing how to fuse the information in these spectra for an asymptotic GLRT and LMPIT. This goes beyond the state-of-the-art, where it is common practice to build detectors of cyclostationarity from ad-hoc functions of these spectra.}, keywords = {ad hoc function, asymptotic GLRT, asymptotic LMPIT, block circulant, block-Toeplitz structure, Correlation, covariance matrices, Covariance matrix, covariance structure, cycle period, cyclic spectrum, Cyclostationarity, Detectors, Frequency-domain analysis, generalized likelihood ratio test, generalized likelihood ratio test (GLRT), hypothesis testing problem, locally most powerful invariant test, locally most powerful invariant test (LMPIT), Loève spectrum, maximum likelihood estimation, multivariate cyclostationarity detection, power spectral density, random processes, scalar valued CS time series, signal detection, spectral analysis, statistical testing, Testing, Time series, Time series analysis, Toeplitz matrices, Toeplitz matrix, vector valued random process cyclostationary, vector valued WSS time series, wide sense stationary, Wijsman theorem, Wijsman's theorem}, pubstate = {published}, tppubtype = {article} } This paper derives an asymptotic generalized 
likelihood ratio test (GLRT) and an asymptotic locally most powerful invariant test (LMPIT) for two hypothesis testing problems: 1) Is a vector-valued random process cyclostationary (CS) or is it wide-sense stationary (WSS)? 2) Is a vector-valued random process CS or is it nonstationary? Our approach uses the relationship between a scalar-valued CS time series and a vector-valued WSS time series for which the knowledge of the cycle period is required. This relationship allows us to formulate the problem as a test for the covariance structure of the observations. The covariance matrix of the observations has a block-Toeplitz structure for CS and WSS processes. By considering the asymptotic case where the covariance matrix becomes block-circulant we are able to derive its maximum likelihood (ML) estimate and thus an asymptotic GLRT. Moreover, using Wijsman's theorem, we also obtain an asymptotic LMPIT. These detectors may be expressed in terms of the Loève spectrum, the cyclic spectrum, and the power spectral density, establishing how to fuse the information in these spectra for an asymptotic GLRT and LMPIT. This goes beyond the state-of-the-art, where it is common practice to build detectors of cyclostationarity from ad-hoc functions of these spectra. |

## Inproceedings |

Vazquez-Vilar, Gonzalo; Martinez, Alfonso; Guillen i Fabregas, Albert A derivation of the Cost-Constrained Sphere-Packing Exponent (Inproceeding) 2015 IEEE International Symposium on Information Theory (ISIT), pp. 929–933, IEEE, Hong Kong, 2015, ISBN: 978-1-4673-7704-1. (Links | BibTeX | Tags: Channel Coding, channel-coding cost-constrained sphere-packing exp, continuous channel, continuous memoryless channel, cost constraint, error probability, hypothesis testing, Lead, Memoryless systems, Optimization, per-codeword cost constraint, reliability function, spherepacking exponent, Testing) @inproceedings{Vazquez-Vilar2015, title = {A derivation of the Cost-Constrained Sphere-Packing Exponent}, author = {Vazquez-Vilar, Gonzalo and Martinez, Alfonso and Guillen i Fabregas, Albert}, url = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=7282591}, doi = {10.1109/ISIT.2015.7282591}, isbn = {978-1-4673-7704-1}, year = {2015}, date = {2015-06-01}, booktitle = {2015 IEEE International Symposium on Information Theory (ISIT)}, pages = {929--933}, publisher = {IEEE}, address = {Hong Kong}, keywords = {Channel Coding, channel-coding cost-constrained sphere-packing exp, continuous channel, continuous memoryless channel, cost constraint, error probability, hypothesis testing, Lead, Memoryless systems, Optimization, per-codeword cost constraint, reliability function, spherepacking exponent, Testing}, pubstate = {published}, tppubtype = {inproceedings} } |

## 2009 |

## Inproceedings |

Djuric, Petar; Miguez, Joaquin Model Assessment with Kolmogorov-Smirnov Statistics (Inproceeding) 2009 IEEE International Conference on Acoustics, Speech and Signal Processing, pp. 2973–2976, IEEE, Taipei, 2009, ISSN: 1520-6149. (Abstract | Links | BibTeX | Tags: Bayesian methods, Computer Simulation, Context modeling, Electronic mail, Filtering, ill-conditioned problem, Kolmogorov-Smirnov statistics, model assessment, modelling, Predictive models, Probability, statistical analysis, statistics, Testing) @inproceedings{Djuric2009, title = {Model Assessment with Kolmogorov-Smirnov Statistics}, author = {Djuric, Petar M. and Miguez, Joaquin}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4960248}, issn = {1520-6149}, year = {2009}, date = {2009-01-01}, booktitle = {2009 IEEE International Conference on Acoustics, Speech and Signal Processing}, pages = {2973--2976}, publisher = {IEEE}, address = {Taipei}, abstract = {One of the most basic problems in science and engineering is the assessment of a considered model. The model should describe a set of observed data and the objective is to find ways of deciding if the model should be rejected. It seems that this is an ill-conditioned problem because we have to test the model against all the possible alternative models. In this paper we use the Kolmogorov-Smirnov statistic to develop a test that shows if the model should be kept or it should be rejected. We explain how this testing can be implemented in the context of particle filtering. 
We demonstrate the performance of the proposed method by computer simulations.}, keywords = {Bayesian methods, Computer Simulation, Context modeling, Electronic mail, Filtering, ill-conditioned problem, Kolmogorov-Smirnov statistics, model assessment, modelling, Predictive models, Probability, statistical analysis, statistics, Testing}, pubstate = {published}, tppubtype = {inproceedings} } One of the most basic problems in science and engineering is the assessment of a considered model. The model should describe a set of observed data and the objective is to find ways of deciding if the model should be rejected. It seems that this is an ill-conditioned problem because we have to test the model against all the possible alternative models. In this paper we use the Kolmogorov-Smirnov statistic to develop a test that shows if the model should be kept or it should be rejected. We explain how this testing can be implemented in the context of particle filtering. We demonstrate the performance of the proposed method by computer simulations. |

Maiz, Cristina; Miguez, Joaquin; Djuric, Petar Particle Filtering in the Presence of Outliers (Inproceeding) 2009 IEEE/SP 15th Workshop on Statistical Signal Processing, pp. 33–36, IEEE, Cardiff, 2009, ISBN: 978-1-4244-2709-3. (Abstract | Links | BibTeX | Tags: computer simulations, Degradation, Filtering, multidimensional random variates, Multidimensional signal processing, Multidimensional systems, Nonlinear tracking, Outlier detection, predictive distributions, Signal processing, signal processing tools, signal-power observations, spatial depth, statistical analysis, statistical distributions, statistics, Target tracking, Testing) @inproceedings{Maiz2009, title = {Particle Filtering in the Presence of Outliers}, author = {Maiz, Cristina S. and Miguez, Joaquin and Djuric, Petar M.}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5278645}, isbn = {978-1-4244-2709-3}, year = {2009}, date = {2009-01-01}, booktitle = {2009 IEEE/SP 15th Workshop on Statistical Signal Processing}, pages = {33--36}, publisher = {IEEE}, address = {Cardiff}, abstract = {Particle filters have become very popular signal processing tools for problems that involve nonlinear tracking of an unobserved signal of interest given a series of related observations. In this paper we propose a new scheme for particle filtering when the observed data are possibly contaminated with outliers. An outlier is an observation that has been generated by some (unknown) mechanism different from the assumed model of the data. Therefore, when handled in the same way as regular observations, outliers may drastically degrade the performance of the particle filter. To address this problem, we introduce an auxiliary particle filtering scheme that incorporates an outlier detection step. We propose to implement it by means of a test involving statistics of the predictive distributions of the observations. 
Specifically, we investigate the use of a proposed statistic called spatial depth that can easily be applied to multidimensional random variates. The performance of the resulting algorithm is assessed by computer simulations of target tracking based on signal-power observations.}, keywords = {computer simulations, Degradation, Filtering, multidimensional random variates, Multidimensional signal processing, Multidimensional systems, Nonlinear tracking, Outlier detection, predictive distributions, Signal processing, signal processing tools, signal-power observations, spatial depth, statistical analysis, statistical distributions, statistics, Target tracking, Testing}, pubstate = {published}, tppubtype = {inproceedings} } Particle filters have become very popular signal processing tools for problems that involve nonlinear tracking of an unobserved signal of interest given a series of related observations. In this paper we propose a new scheme for particle filtering when the observed data are possibly contaminated with outliers. An outlier is an observation that has been generated by some (unknown) mechanism different from the assumed model of the data. Therefore, when handled in the same way as regular observations, outliers may drastically degrade the performance of the particle filter. To address this problem, we introduce an auxiliary particle filtering scheme that incorporates an outlier detection step. We propose to implement it by means of a test involving statistics of the predictive distributions of the observations. Specifically, we investigate the use of a proposed statistic called spatial depth that can easily be applied to multidimensional random variates. The performance of the resulting algorithm is assessed by computer simulations of target tracking based on signal-power observations. |

Martino, Luca; Miguez, Joaquin An Adaptive Accept/Reject Sampling Algorithm for Posterior Probability Distributions (Inproceeding) 2009 IEEE/SP 15th Workshop on Statistical Signal Processing, pp. 45–48, IEEE, Cardiff, 2009, ISBN: 978-1-4244-2709-3. (Abstract | Links | BibTeX | Tags: adaptive accept/reject sampling, Adaptive rejection sampling, arbitrary target probability distributions, Computer Simulation, Filtering, Monte Carlo integration, Monte Carlo methods, posterior probability distributions, Probability, Probability density function, Probability distribution, Proposals, Rejection sampling, Sampling methods, sensor networks, Signal processing algorithms, signal sampling, Testing) @inproceedings{Martino2009b, title = {An Adaptive Accept/Reject Sampling Algorithm for Posterior Probability Distributions}, author = {Martino, Luca and Miguez, Joaquin}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5278644}, isbn = {978-1-4244-2709-3}, year = {2009}, date = {2009-01-01}, booktitle = {2009 IEEE/SP 15th Workshop on Statistical Signal Processing}, pages = {45--48}, publisher = {IEEE}, address = {Cardiff}, abstract = {Accept/reject sampling is a well-known method to generate random samples from arbitrary target probability distributions. It demands the design of a suitable proposal probability density function (pdf) from which candidate samples can be drawn. These samples are either accepted or rejected depending on a test involving the ratio of the target and proposal densities. In this paper we introduce an adaptive method to build a sequence of proposal pdf's that approximate the target density and hence can ensure a high acceptance rate. 
In order to illustrate the application of the method we design an accept/reject particle filter and then assess its performance and sampling efficiency numerically, by means of computer simulations.}, keywords = {adaptive accept/reject sampling, Adaptive rejection sampling, arbitrary target probability distributions, Computer Simulation, Filtering, Monte Carlo integration, Monte Carlo methods, posterior probability distributions, Probability, Probability density function, Probability distribution, Proposals, Rejection sampling, Sampling methods, sensor networks, Signal processing algorithms, signal sampling, Testing}, pubstate = {published}, tppubtype = {inproceedings} } Accept/reject sampling is a well-known method to generate random samples from arbitrary target probability distributions. It demands the design of a suitable proposal probability density function (pdf) from which candidate samples can be drawn. These samples are either accepted or rejected depending on a test involving the ratio of the target and proposal densities. In this paper we introduce an adaptive method to build a sequence of proposal pdf's that approximate the target density and hence can ensure a high acceptance rate. In order to illustrate the application of the method we design an accept/reject particle filter and then assess its performance and sampling efficiency numerically, by means of computer simulations. |

## 2008 |

## Inproceedings |

Santiago-Mozos, Ricardo; Fernandez-Lorenzana, R.; Perez-Cruz, Fernando; Artés-Rodríguez, Antonio On the Uncertainty in Sequential Hypothesis Testing (Inproceeding) 2008 5th IEEE International Symposium on Biomedical Imaging: From Nano to Macro, pp. 1223–1226, IEEE, Paris, 2008, ISBN: 978-1-4244-2002-5. (Abstract | Links | BibTeX | Tags: binary hypothesis test, Biomedical imaging, Detectors, H infinity control, likelihood ratio, Medical diagnostic imaging, medical image application, medical image processing, Medical tests, patient diagnosis, Probability, Random variables, Sequential analysis, sequential hypothesis testing, sequential probability ratio test, Signal processing, Testing, tuberculosis diagnosis, Uncertainty) @inproceedings{Santiago-Mozos2008, title = {On the Uncertainty in Sequential Hypothesis Testing}, author = {Santiago-Mozos, Ricardo and Fernandez-Lorenzana, R. and Perez-Cruz, Fernando and Artés-Rodríguez, Antonio}, url = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=4541223}, isbn = {978-1-4244-2002-5}, year = {2008}, date = {2008-01-01}, booktitle = {2008 5th IEEE International Symposium on Biomedical Imaging: From Nano to Macro}, pages = {1223--1226}, publisher = {IEEE}, address = {Paris}, abstract = {We consider the problem of sequential hypothesis testing when the exact pdfs are not known but instead a set of iid samples are used to describe the hypotheses. We modify the classical test by introducing a likelihood ratio interval which accommodates the uncertainty in the pdfs. The test finishes when the whole likelihood ratio interval crosses one of the thresholds and reduces to the classical test as the number of samples to describe the hypotheses tend to infinity. We illustrate the performance of this test in a medical image application related to tuberculosis diagnosis. 
We show in this example how the test confidence level can be accurately determined.}, keywords = {binary hypothesis test, Biomedical imaging, Detectors, H infinity control, likelihood ratio, Medical diagnostic imaging, medical image application, medical image processing, Medical tests, patient diagnosis, Probability, Random variables, Sequential analysis, sequential hypothesis testing, sequential probability ratio test, Signal processing, Testing, tuberculosis diagnosis, Uncertainty}, pubstate = {published}, tppubtype = {inproceedings} } We consider the problem of sequential hypothesis testing when the exact pdfs are not known but instead a set of iid samples are used to describe the hypotheses. We modify the classical test by introducing a likelihood ratio interval which accommodates the uncertainty in the pdfs. The test finishes when the whole likelihood ratio interval crosses one of the thresholds and reduces to the classical test as the number of samples to describe the hypotheses tend to infinity. We illustrate the performance of this test in a medical image application related to tuberculosis diagnosis. We show in this example how the test confidence level can be accurately determined. |